From 9b834aa4436b880a43e0bcc8cd8161d2906929e7 Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Mon, 26 Jun 2023 03:47:21 -0700 Subject: [PATCH 01/17] Update README to be more clear about use of the different files in the repo This is intended to clarify that developers using Vulkan are likely to need only the header files, not the scripts and other material in the repository which are used by the SDK and other downstream ecosystem components. Closes #407 --- README.md | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 9c93c30..b2e830f 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,27 @@ Vulkan header files and API registry +This repository contains Vulkan header files, include files for C and C++, +and related scripts and tests. + +Most of the files in this repository are sourced from, or generated from, +other repositories as described in [CONTRIBUTING.md](CONTRIBUTING.md). +Vulkan-Headers exists as a staging area for these files, most of which are +then consumed by downstream repositories used to build SDK components such +as the Vulkan Validation Layers and Conformance Test Suite. + +Developers normally obtain headers from the official +[Vulkan-SDK](https://www.lunarg.com/vulkan-sdk/). +They can also use headers from, or packaged from, this repository. + +In most cases, developers should only need the headers, not the scripts and +other material in this repository. +In particular if you are packaging the headers for inclusion in Linux +distributions or similar uses, we advise that you do not include the +scripts, and direct potential users of the scripts to their canonical +sources in the [Vulkan Specification +repository](https://github.com/KhronosGroup/Vulkan-Docs). + ## Contributing See the [CONTRIBUTING.md](CONTRIBUTING.md) @@ -22,8 +43,8 @@ Vulkan-Headers are shipped as part of the official [Vulkan-SDK](https://www.luna Updates to the `Vulkan-Headers` repository which correspond to a new Vulkan specification release are tagged using the following format: -`v<`_`version`_`>` (e.g., `v1.1.96`). +`v<`_`version`_`>` (e.g., `v1.3.255`). **Note**: Marked version releases have undergone thorough testing but do not imply the same quality level as SDK tags. SDK tags follow the -`sdk-<`_`version`_`>.<`_`patch`_`>` format (e.g., `sdk-1.1.92.0`). +`sdk-<`_`version`_`>.<`_`patch`_`>` format (e.g., `sdk-1.3.250.0`). From ed857118e243fdc0f3a100f00ac9919e874cfe63 Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Fri, 30 Jun 2023 06:29:15 -0700 Subject: [PATCH 02/17] Update for Vulkan-Docs 1.3.256 --- include/vulkan/vulkan.cppm | 2926 +++++++++++++++++++++++ include/vulkan/vulkan.hpp | 118 +- include/vulkan/vulkan_core.h | 5 +- include/vulkan/vulkan_enums.hpp | 3 +- include/vulkan/vulkan_format_traits.hpp | 12 +- include/vulkan/vulkan_raii.hpp | 565 ++--- include/vulkan/vulkan_to_string.hpp | 6 +- registry/validusage.json | 182 +- registry/vk.xml | 57 +- 9 files changed, 3439 insertions(+), 435 deletions(-) create mode 100644 include/vulkan/vulkan.cppm diff --git a/include/vulkan/vulkan.cppm b/include/vulkan/vulkan.cppm new file mode 100644 index 0000000..43b9c4e --- /dev/null +++ b/include/vulkan/vulkan.cppm @@ -0,0 +1,2926 @@ +// Copyright 2015-2023 The Khronos Group Inc. +// +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+
+module;
+
+#include
+#include
+#include
+#include
+#include
+
+export module vulkan;
+
+VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+
+export namespace VULKAN_HPP_NAMESPACE
+{
+  //=====================================
+  //=== HARDCODED TYPEs AND FUNCTIONs ===
+  //=====================================
+  using VULKAN_HPP_NAMESPACE::ArrayWrapper1D;
+  using VULKAN_HPP_NAMESPACE::ArrayWrapper2D;
+  using VULKAN_HPP_NAMESPACE::DispatchLoaderBase;
+  using VULKAN_HPP_NAMESPACE::Flags;
+  using VULKAN_HPP_NAMESPACE::FlagTraits;
+
+#if !defined( VK_NO_PROTOTYPES )
+  using VULKAN_HPP_NAMESPACE::DispatchLoaderStatic;
+#endif /*VK_NO_PROTOTYPES*/
+
+  using VULKAN_HPP_NAMESPACE::operator&;
+  using VULKAN_HPP_NAMESPACE::operator|;
+  using VULKAN_HPP_NAMESPACE::operator^;
+  using VULKAN_HPP_NAMESPACE::operator~;
+  using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+  using VULKAN_HPP_NAMESPACE::ArrayProxy;
+  using VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries;
+  using VULKAN_HPP_NAMESPACE::Optional;
+  using VULKAN_HPP_NAMESPACE::StridedArrayProxy;
+  using VULKAN_HPP_NAMESPACE::StructureChain;
+  using VULKAN_HPP_NAMESPACE::UniqueHandle;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  using VULKAN_HPP_NAMESPACE::ObjectDestroy;
+  using VULKAN_HPP_NAMESPACE::ObjectFree;
+  using VULKAN_HPP_NAMESPACE::ObjectRelease;
+  using VULKAN_HPP_NAMESPACE::PoolFree;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  //==================
+  //=== BASE TYPEs ===
+  //==================
+  using VULKAN_HPP_NAMESPACE::Bool32;
+  using VULKAN_HPP_NAMESPACE::DeviceAddress;
+  using VULKAN_HPP_NAMESPACE::DeviceSize;
+  using VULKAN_HPP_NAMESPACE::RemoteAddressNV;
+  using VULKAN_HPP_NAMESPACE::SampleMask;
+
+  //=============
+  //=== ENUMs ===
+  //=============
+  using VULKAN_HPP_NAMESPACE::CppType;
+
+  //=== VK_VERSION_1_0 ===
+  using VULKAN_HPP_NAMESPACE::AccessFlagBits;
+  using VULKAN_HPP_NAMESPACE::AccessFlags;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlagBits;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags;
+  using VULKAN_HPP_NAMESPACE::AttachmentLoadOp;
+  using VULKAN_HPP_NAMESPACE::AttachmentStoreOp;
+  using VULKAN_HPP_NAMESPACE::BlendFactor;
+  using VULKAN_HPP_NAMESPACE::BlendOp;
+  using VULKAN_HPP_NAMESPACE::BorderColor;
+  using VULKAN_HPP_NAMESPACE::BufferCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::BufferCreateFlags;
+  using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits;
+  using VULKAN_HPP_NAMESPACE::BufferUsageFlags;
+  using VULKAN_HPP_NAMESPACE::BufferViewCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::BufferViewCreateFlags;
+  using VULKAN_HPP_NAMESPACE::ColorComponentFlagBits;
+  using VULKAN_HPP_NAMESPACE::ColorComponentFlags;
+  using VULKAN_HPP_NAMESPACE::CommandBufferLevel;
+  using VULKAN_HPP_NAMESPACE::CommandBufferResetFlagBits;
+  using VULKAN_HPP_NAMESPACE::CommandBufferResetFlags;
+  using VULKAN_HPP_NAMESPACE::CommandBufferUsageFlagBits;
+  using VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags;
+  using VULKAN_HPP_NAMESPACE::CommandPoolCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags;
+  using VULKAN_HPP_NAMESPACE::CommandPoolResetFlagBits;
+  using VULKAN_HPP_NAMESPACE::CommandPoolResetFlags;
+  using VULKAN_HPP_NAMESPACE::CompareOp;
+  using VULKAN_HPP_NAMESPACE::ComponentSwizzle;
+  using VULKAN_HPP_NAMESPACE::CullModeFlagBits;
+  using VULKAN_HPP_NAMESPACE::CullModeFlags;
+  using VULKAN_HPP_NAMESPACE::DependencyFlagBits;
+  using VULKAN_HPP_NAMESPACE::DependencyFlags;
+  using
VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorType; + using VULKAN_HPP_NAMESPACE::DeviceCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DeviceCreateFlags; + using VULKAN_HPP_NAMESPACE::DynamicState; + using VULKAN_HPP_NAMESPACE::EventCreateFlagBits; + using VULKAN_HPP_NAMESPACE::EventCreateFlags; + using VULKAN_HPP_NAMESPACE::FenceCreateFlagBits; + using VULKAN_HPP_NAMESPACE::FenceCreateFlags; + using VULKAN_HPP_NAMESPACE::Filter; + using VULKAN_HPP_NAMESPACE::Format; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlags; + using VULKAN_HPP_NAMESPACE::FramebufferCreateFlagBits; + using VULKAN_HPP_NAMESPACE::FramebufferCreateFlags; + using VULKAN_HPP_NAMESPACE::FrontFace; + using VULKAN_HPP_NAMESPACE::ImageAspectFlagBits; + using VULKAN_HPP_NAMESPACE::ImageAspectFlags; + using VULKAN_HPP_NAMESPACE::ImageCreateFlagBits; + using VULKAN_HPP_NAMESPACE::ImageCreateFlags; + using VULKAN_HPP_NAMESPACE::ImageLayout; + using VULKAN_HPP_NAMESPACE::ImageTiling; + using VULKAN_HPP_NAMESPACE::ImageType; + using VULKAN_HPP_NAMESPACE::ImageUsageFlagBits; + using VULKAN_HPP_NAMESPACE::ImageUsageFlags; + using VULKAN_HPP_NAMESPACE::ImageViewCreateFlagBits; + using VULKAN_HPP_NAMESPACE::ImageViewCreateFlags; + using VULKAN_HPP_NAMESPACE::ImageViewType; + using VULKAN_HPP_NAMESPACE::IndexType; + using VULKAN_HPP_NAMESPACE::InstanceCreateFlagBits; + using VULKAN_HPP_NAMESPACE::InstanceCreateFlags; + using VULKAN_HPP_NAMESPACE::InternalAllocationType; + using VULKAN_HPP_NAMESPACE::LogicOp; + using VULKAN_HPP_NAMESPACE::MemoryHeapFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryHeapFlags; + using VULKAN_HPP_NAMESPACE::MemoryMapFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryMapFlags; + using VULKAN_HPP_NAMESPACE::MemoryPropertyFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryPropertyFlags; + using VULKAN_HPP_NAMESPACE::ObjectType; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceType; + using VULKAN_HPP_NAMESPACE::PipelineBindPoint; + using VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineStageFlags; + using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags; + using 
VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PolygonMode; + using VULKAN_HPP_NAMESPACE::PrimitiveTopology; + using VULKAN_HPP_NAMESPACE::QueryControlFlagBits; + using VULKAN_HPP_NAMESPACE::QueryControlFlags; + using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlagBits; + using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlagBits; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags; + using VULKAN_HPP_NAMESPACE::QueryResultFlagBits; + using VULKAN_HPP_NAMESPACE::QueryResultFlags; + using VULKAN_HPP_NAMESPACE::QueryType; + using VULKAN_HPP_NAMESPACE::QueueFlagBits; + using VULKAN_HPP_NAMESPACE::QueueFlags; + using VULKAN_HPP_NAMESPACE::RenderPassCreateFlagBits; + using VULKAN_HPP_NAMESPACE::RenderPassCreateFlags; + using VULKAN_HPP_NAMESPACE::Result; + using VULKAN_HPP_NAMESPACE::SampleCountFlagBits; + using VULKAN_HPP_NAMESPACE::SampleCountFlags; + using VULKAN_HPP_NAMESPACE::SamplerAddressMode; + using VULKAN_HPP_NAMESPACE::SamplerCreateFlagBits; + using VULKAN_HPP_NAMESPACE::SamplerCreateFlags; + using VULKAN_HPP_NAMESPACE::SamplerMipmapMode; + using VULKAN_HPP_NAMESPACE::SemaphoreCreateFlagBits; + using VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags; + using VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlagBits; + using VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags; + using VULKAN_HPP_NAMESPACE::ShaderStageFlagBits; + using VULKAN_HPP_NAMESPACE::ShaderStageFlags; + using VULKAN_HPP_NAMESPACE::SharingMode; + using VULKAN_HPP_NAMESPACE::SparseImageFormatFlagBits; + using VULKAN_HPP_NAMESPACE::SparseImageFormatFlags; + using VULKAN_HPP_NAMESPACE::SparseMemoryBindFlagBits; + using VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags; + using VULKAN_HPP_NAMESPACE::StencilFaceFlagBits; + using VULKAN_HPP_NAMESPACE::StencilFaceFlags; + using VULKAN_HPP_NAMESPACE::StencilOp; + using VULKAN_HPP_NAMESPACE::StructureType; + using VULKAN_HPP_NAMESPACE::SubpassContents; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionFlagBits; + using VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags; + using VULKAN_HPP_NAMESPACE::SystemAllocationScope; + using VULKAN_HPP_NAMESPACE::VendorId; + using VULKAN_HPP_NAMESPACE::VertexInputRate; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::ChromaLocation; + using VULKAN_HPP_NAMESPACE::ChromaLocationKHR; + using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlagBits; + using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateTypeKHR; + using VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlagBits; + using VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits; + using 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::FenceImportFlagBits; + using VULKAN_HPP_NAMESPACE::FenceImportFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::FenceImportFlags; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlags; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags; + using VULKAN_HPP_NAMESPACE::PointClippingBehavior; + using VULKAN_HPP_NAMESPACE::PointClippingBehaviorKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversionKHR; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrRange; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrRangeKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBits; + using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreImportFlags; + using VULKAN_HPP_NAMESPACE::SubgroupFeatureFlagBits; + using VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags; + using VULKAN_HPP_NAMESPACE::TessellationDomainOrigin; + using VULKAN_HPP_NAMESPACE::TessellationDomainOriginKHR; + + //=== VK_VERSION_1_2 === + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits; + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlags; + using VULKAN_HPP_NAMESPACE::DriverId; + using VULKAN_HPP_NAMESPACE::DriverIdKHR; + using VULKAN_HPP_NAMESPACE::ResolveModeFlagBits; + using VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::ResolveModeFlags; + using VULKAN_HPP_NAMESPACE::SamplerReductionMode; + using VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT; + using VULKAN_HPP_NAMESPACE::SemaphoreType; + using VULKAN_HPP_NAMESPACE::SemaphoreTypeKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBits; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags; + using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence; + using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::AccessFlagBits2; + using VULKAN_HPP_NAMESPACE::AccessFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::AccessFlags2; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlags2; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags; + using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2; + using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::PipelineStageFlags2; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags; + using 
VULKAN_HPP_NAMESPACE::RenderingFlagBits; + using VULKAN_HPP_NAMESPACE::RenderingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::RenderingFlags; + using VULKAN_HPP_NAMESPACE::SubmitFlagBits; + using VULKAN_HPP_NAMESPACE::SubmitFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SubmitFlags; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBits; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlags; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::ColorSpaceKHR; + using VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR; + using VULKAN_HPP_NAMESPACE::PresentModeKHR; + using VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR; + using VULKAN_HPP_NAMESPACE::SwapchainCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR; + +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT; + + //=== VK_AMD_rasterization_order === + using VULKAN_HPP_NAMESPACE::RasterizationOrderAMD; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::QueryResultStatusKHR; + using VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoCapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoCodingControlFlagBitsKHR; + using 
VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEndCodingFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR; + + //=== VK_KHR_video_decode_queue === + using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR; + + //=== VK_EXT_transform_feedback === + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h264 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsEXT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_EXT_video_encode_h265 === + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_video_decode_h264 === + using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsKHR; + + //=== VK_AMD_shader_info === + using VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagBitsGGP; + using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsNV; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV; + + //=== VK_EXT_validation_flags === + using VULKAN_HPP_NAMESPACE::ValidationCheckEXT; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + using VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagBitsNN; + using VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN; +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== 
VK_EXT_pipeline_robustness === + using VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT; + + //=== VK_EXT_conditional_rendering === + using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT; + + //=== VK_EXT_display_surface_counter === + using VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT; + + //=== VK_EXT_display_control === + using VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT; + using VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT; + using VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT; + + //=== VK_NV_viewport_swizzle === + using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV; + using VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV; + + //=== VK_EXT_discard_rectangles === + using VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT; + using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT; + + //=== VK_EXT_conservative_rasterization === + using VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT; + + //=== VK_EXT_depth_clip_enable === + using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT; + + //=== VK_KHR_performance_query === + using VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR; + using VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR; + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagBitsMVK; + using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagBitsMVK; + using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT; + + //=== VK_EXT_blend_operation_advanced === + using VULKAN_HPP_NAMESPACE::BlendOverlapEXT; + + //=== VK_NV_fragment_coverage_to_color === + using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV; + + //=== VK_KHR_acceleration_structure === + using 
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR; + using VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsNV; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR; + using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV; + using VULKAN_HPP_NAMESPACE::GeometryFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::GeometryFlagBitsNV; + using VULKAN_HPP_NAMESPACE::GeometryFlagsKHR; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsNV; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR; + using VULKAN_HPP_NAMESPACE::GeometryTypeKHR; + using VULKAN_HPP_NAMESPACE::GeometryTypeNV; + + //=== VK_KHR_ray_tracing_pipeline === + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR; + using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV; + using VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR; + + //=== VK_NV_framebuffer_mixed_samples === + using VULKAN_HPP_NAMESPACE::CoverageModulationModeNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT; + using VULKAN_HPP_NAMESPACE::ValidationCacheHeaderVersionEXT; + + //=== VK_NV_shading_rate_image === + using VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV; + using VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV; + + //=== VK_AMD_pipeline_compiler_control === + using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagBitsAMD; + using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD; + + //=== VK_EXT_calibrated_timestamps === + using VULKAN_HPP_NAMESPACE::TimeDomainEXT; + + //=== VK_KHR_global_priority === + using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT; + using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR; + + //=== VK_AMD_memory_overallocation_behavior === + using VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL; + using VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL; + using VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagBitsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + using 
VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR; + + //=== VK_AMD_shader_core_properties2 === + using VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagBitsAMD; + using VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD; + + //=== VK_EXT_validation_features === + using VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT; + using VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT; + + //=== VK_NV_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::ComponentTypeNV; + using VULKAN_HPP_NAMESPACE::ScopeNV; + + //=== VK_NV_coverage_reduction_mode === + using VULKAN_HPP_NAMESPACE::CoverageReductionModeNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV; + + //=== VK_EXT_provoking_vertex === + using VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + using VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT; + + //=== VK_EXT_line_rasterization === + using VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT; + + //=== VK_KHR_pipeline_executable_properties === + using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR; + + //=== VK_KHR_map_memory2 === + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR; + + //=== VK_EXT_surface_maintenance1 === + using VULKAN_HPP_NAMESPACE::PresentGravityFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT; + using VULKAN_HPP_NAMESPACE::PresentScalingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagBitsNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV; + using VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV; + using VULKAN_HPP_NAMESPACE::IndirectStateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV; + + //=== VK_EXT_depth_bias_control === + using VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT; + + //=== VK_EXT_device_memory_report === + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT; + + //=== VK_EXT_pipeline_creation_cache_control === + using VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_KHR_video_encode_queue === + using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR; + using 
VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_device_diagnostics_config === + using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagBitsNV; + using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV; + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + using VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagsEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_EXT_graphics_pipeline_library === + using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT; + using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags; + + //=== VK_NV_fragment_shading_rate_enums === + using VULKAN_HPP_NAMESPACE::FragmentShadingRateNV; + using VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV; + + //=== VK_NV_ray_tracing_motion_blur === + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagBitsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagBitsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV; + using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV; + + //=== VK_EXT_image_compression_control === + using VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT; + + //=== VK_EXT_device_fault === + using VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT; + using VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT; + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT; +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_device_address_binding_report === + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT; + using VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagBitsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagBitsFUCHSIA; + using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagBitsQNX; + using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::BuildMicromapFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT; + using VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT; + using VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT; + using VULKAN_HPP_NAMESPACE::MicromapCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT; + using VULKAN_HPP_NAMESPACE::MicromapTypeEXT; + using VULKAN_HPP_NAMESPACE::OpacityMicromapFormatEXT; + using VULKAN_HPP_NAMESPACE::OpacityMicromapSpecialIndexEXT; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + 
//=== VK_NV_displacement_micromap === + using VULKAN_HPP_NAMESPACE::DisplacementMicromapFormatNV; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_NV_memory_decompression === + using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagBitsNV; + using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV; + + //=== VK_EXT_subpass_merge_feedback === + using VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT; + + //=== VK_LUNARG_direct_driver_loading === + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagBitsLUNARG; + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG; + using VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG; + + //=== VK_EXT_rasterization_order_attachment_access === + using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags; + using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlagBits; + using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV; + using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT; + using VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT; + + //=== VK_NV_ray_tracing_invocation_reorder === + using VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV; + + //=== VK_KHR_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::ComponentTypeKHR; + using VULKAN_HPP_NAMESPACE::ScopeKHR; + + //========================= + //=== Index Type Traits === + //========================= + using VULKAN_HPP_NAMESPACE::IndexTypeValue; + + //====================== + //=== ENUM to_string === + //====================== +#if !defined( VULKAN_HPP_NO_TO_STRING ) + using VULKAN_HPP_NAMESPACE::to_string; + using VULKAN_HPP_NAMESPACE::toHexString; +#endif /*VULKAN_HPP_NO_TO_STRING*/ + + //============================= + //=== EXCEPTIONs AND ERRORs === + //============================= +#if !defined( VULKAN_HPP_NO_EXCEPTIONS ) + using VULKAN_HPP_NAMESPACE::DeviceLostError; + using VULKAN_HPP_NAMESPACE::Error; + using VULKAN_HPP_NAMESPACE::errorCategory; + using VULKAN_HPP_NAMESPACE::ErrorCategoryImpl; + using VULKAN_HPP_NAMESPACE::ExtensionNotPresentError; + using VULKAN_HPP_NAMESPACE::FeatureNotPresentError; + using VULKAN_HPP_NAMESPACE::FormatNotSupportedError; + using VULKAN_HPP_NAMESPACE::FragmentationError; + using VULKAN_HPP_NAMESPACE::FragmentedPoolError; + using VULKAN_HPP_NAMESPACE::ImageUsageNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::IncompatibleDisplayKHRError; + using VULKAN_HPP_NAMESPACE::IncompatibleDriverError; + using VULKAN_HPP_NAMESPACE::InitializationFailedError; + using VULKAN_HPP_NAMESPACE::InvalidDrmFormatModifierPlaneLayoutEXTError; + using VULKAN_HPP_NAMESPACE::InvalidExternalHandleError; + using VULKAN_HPP_NAMESPACE::InvalidOpaqueCaptureAddressError; + using VULKAN_HPP_NAMESPACE::InvalidShaderNVError; + using VULKAN_HPP_NAMESPACE::LayerNotPresentError; + 
using VULKAN_HPP_NAMESPACE::LogicError; + using VULKAN_HPP_NAMESPACE::make_error_code; + using VULKAN_HPP_NAMESPACE::make_error_condition; + using VULKAN_HPP_NAMESPACE::MemoryMapFailedError; + using VULKAN_HPP_NAMESPACE::NativeWindowInUseKHRError; + using VULKAN_HPP_NAMESPACE::NotPermittedKHRError; + using VULKAN_HPP_NAMESPACE::OutOfDateKHRError; + using VULKAN_HPP_NAMESPACE::OutOfDeviceMemoryError; + using VULKAN_HPP_NAMESPACE::OutOfHostMemoryError; + using VULKAN_HPP_NAMESPACE::OutOfPoolMemoryError; + using VULKAN_HPP_NAMESPACE::SurfaceLostKHRError; + using VULKAN_HPP_NAMESPACE::SystemError; + using VULKAN_HPP_NAMESPACE::TooManyObjectsError; + using VULKAN_HPP_NAMESPACE::UnknownError; + using VULKAN_HPP_NAMESPACE::ValidationFailedEXTError; + using VULKAN_HPP_NAMESPACE::VideoPictureLayoutNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoProfileCodecNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoProfileFormatNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoProfileOperationNotSupportedKHRError; + using VULKAN_HPP_NAMESPACE::VideoStdVersionNotSupportedKHRError; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + using VULKAN_HPP_NAMESPACE::FullScreenExclusiveModeLostEXTError; +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + using VULKAN_HPP_NAMESPACE::InvalidVideoStdParametersKHRError; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + using VULKAN_HPP_NAMESPACE::CompressionExhaustedEXTError; + using VULKAN_HPP_NAMESPACE::IncompatibleShaderBinaryEXTError; +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + + using VULKAN_HPP_NAMESPACE::createResultValueType; + using VULKAN_HPP_NAMESPACE::ignore; + using VULKAN_HPP_NAMESPACE::resultCheck; + using VULKAN_HPP_NAMESPACE::ResultValue; + using VULKAN_HPP_NAMESPACE::ResultValueType; + + //========================================= + //=== CONSTEXPR CONSTANTs AND FUNCTIONs === + //========================================= + using VULKAN_HPP_NAMESPACE::ApiVersion; + using VULKAN_HPP_NAMESPACE::ApiVersion10; + using VULKAN_HPP_NAMESPACE::ApiVersion11; + using VULKAN_HPP_NAMESPACE::ApiVersion12; + using VULKAN_HPP_NAMESPACE::ApiVersion13; + using VULKAN_HPP_NAMESPACE::apiVersionMajor; + using VULKAN_HPP_NAMESPACE::apiVersionMinor; + using VULKAN_HPP_NAMESPACE::apiVersionPatch; + using VULKAN_HPP_NAMESPACE::apiVersionVariant; + using VULKAN_HPP_NAMESPACE::AttachmentUnused; + using VULKAN_HPP_NAMESPACE::False; + using VULKAN_HPP_NAMESPACE::HeaderVersion; + using VULKAN_HPP_NAMESPACE::HeaderVersionComplete; + using VULKAN_HPP_NAMESPACE::LodClampNone; + using VULKAN_HPP_NAMESPACE::LuidSize; + using VULKAN_HPP_NAMESPACE::makeApiVersion; + using VULKAN_HPP_NAMESPACE::makeVersion; + using VULKAN_HPP_NAMESPACE::MaxDescriptionSize; + using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSize; + using VULKAN_HPP_NAMESPACE::MaxDriverInfoSize; + using VULKAN_HPP_NAMESPACE::MaxDriverNameSize; + using VULKAN_HPP_NAMESPACE::MaxExtensionNameSize; + using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKhr; + using VULKAN_HPP_NAMESPACE::MaxMemoryHeaps; + using VULKAN_HPP_NAMESPACE::MaxMemoryTypes; + using VULKAN_HPP_NAMESPACE::MaxPhysicalDeviceNameSize; + using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeExt; + using VULKAN_HPP_NAMESPACE::QueueFamilyExternal; + using VULKAN_HPP_NAMESPACE::QueueFamilyForeignExt; + using VULKAN_HPP_NAMESPACE::QueueFamilyIgnored; + using VULKAN_HPP_NAMESPACE::Remaining3DSlicesExt; + using VULKAN_HPP_NAMESPACE::RemainingArrayLayers; + using VULKAN_HPP_NAMESPACE::RemainingMipLevels; + using 
VULKAN_HPP_NAMESPACE::ShaderUnusedKhr; + using VULKAN_HPP_NAMESPACE::SubpassExternal; + using VULKAN_HPP_NAMESPACE::True; + using VULKAN_HPP_NAMESPACE::UuidSize; + using VULKAN_HPP_NAMESPACE::versionMajor; + using VULKAN_HPP_NAMESPACE::versionMinor; + using VULKAN_HPP_NAMESPACE::versionPatch; + using VULKAN_HPP_NAMESPACE::WholeSize; + + //=============== + //=== STRUCTs === + //=============== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::AllocationCallbacks; + using VULKAN_HPP_NAMESPACE::ApplicationInfo; + using VULKAN_HPP_NAMESPACE::AttachmentDescription; + using VULKAN_HPP_NAMESPACE::AttachmentReference; + using VULKAN_HPP_NAMESPACE::BaseInStructure; + using VULKAN_HPP_NAMESPACE::BaseOutStructure; + using VULKAN_HPP_NAMESPACE::BindSparseInfo; + using VULKAN_HPP_NAMESPACE::BufferCopy; + using VULKAN_HPP_NAMESPACE::BufferCreateInfo; + using VULKAN_HPP_NAMESPACE::BufferImageCopy; + using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier; + using VULKAN_HPP_NAMESPACE::BufferViewCreateInfo; + using VULKAN_HPP_NAMESPACE::ClearAttachment; + using VULKAN_HPP_NAMESPACE::ClearColorValue; + using VULKAN_HPP_NAMESPACE::ClearDepthStencilValue; + using VULKAN_HPP_NAMESPACE::ClearRect; + using VULKAN_HPP_NAMESPACE::ClearValue; + using VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo; + using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo; + using VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo; + using VULKAN_HPP_NAMESPACE::ComponentMapping; + using VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo; + using VULKAN_HPP_NAMESPACE::CopyDescriptorSet; + using VULKAN_HPP_NAMESPACE::DescriptorBufferInfo; + using VULKAN_HPP_NAMESPACE::DescriptorImageInfo; + using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorPoolSize; + using VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo; + using VULKAN_HPP_NAMESPACE::DispatchIndirectCommand; + using VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand; + using VULKAN_HPP_NAMESPACE::DrawIndirectCommand; + using VULKAN_HPP_NAMESPACE::EventCreateInfo; + using VULKAN_HPP_NAMESPACE::ExtensionProperties; + using VULKAN_HPP_NAMESPACE::Extent2D; + using VULKAN_HPP_NAMESPACE::Extent3D; + using VULKAN_HPP_NAMESPACE::FenceCreateInfo; + using VULKAN_HPP_NAMESPACE::FormatProperties; + using VULKAN_HPP_NAMESPACE::FramebufferCreateInfo; + using VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageBlit; + using VULKAN_HPP_NAMESPACE::ImageCopy; + using VULKAN_HPP_NAMESPACE::ImageCreateInfo; + using VULKAN_HPP_NAMESPACE::ImageFormatProperties; + using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier; + using VULKAN_HPP_NAMESPACE::ImageResolve; + using VULKAN_HPP_NAMESPACE::ImageSubresource; + using VULKAN_HPP_NAMESPACE::ImageSubresourceLayers; + using VULKAN_HPP_NAMESPACE::ImageSubresourceRange; + using VULKAN_HPP_NAMESPACE::ImageViewCreateInfo; + using VULKAN_HPP_NAMESPACE::InstanceCreateInfo; + using VULKAN_HPP_NAMESPACE::LayerProperties; + using VULKAN_HPP_NAMESPACE::MappedMemoryRange; + using VULKAN_HPP_NAMESPACE::MemoryAllocateInfo; + using VULKAN_HPP_NAMESPACE::MemoryBarrier; + using VULKAN_HPP_NAMESPACE::MemoryHeap; + using VULKAN_HPP_NAMESPACE::MemoryRequirements; + using VULKAN_HPP_NAMESPACE::MemoryType; + using VULKAN_HPP_NAMESPACE::Offset2D; + 
using VULKAN_HPP_NAMESPACE::Offset3D; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties; + using VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState; + using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PushConstantRange; + using VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo; + using VULKAN_HPP_NAMESPACE::QueueFamilyProperties; + using VULKAN_HPP_NAMESPACE::Rect2D; + using VULKAN_HPP_NAMESPACE::RenderPassBeginInfo; + using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo; + using VULKAN_HPP_NAMESPACE::SamplerCreateInfo; + using VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo; + using VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo; + using VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo; + using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryBind; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo; + using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements; + using VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo; + using VULKAN_HPP_NAMESPACE::SparseMemoryBind; + using VULKAN_HPP_NAMESPACE::SpecializationInfo; + using VULKAN_HPP_NAMESPACE::SpecializationMapEntry; + using VULKAN_HPP_NAMESPACE::StencilOpState; + using VULKAN_HPP_NAMESPACE::SubmitInfo; + using VULKAN_HPP_NAMESPACE::SubpassDependency; + using VULKAN_HPP_NAMESPACE::SubpassDescription; + using VULKAN_HPP_NAMESPACE::SubresourceLayout; + using VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDescription; + using VULKAN_HPP_NAMESPACE::Viewport; + using VULKAN_HPP_NAMESPACE::WriteDescriptorSet; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo; + using VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfoKHR; + using VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo; + using VULKAN_HPP_NAMESPACE::BindBufferMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo; + using VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImageMemoryInfo; + using VULKAN_HPP_NAMESPACE::BindImageMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo; + using VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfoKHR; + using VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2; + using VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2KHR; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupportKHR; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo; + using 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntryKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceQueueInfo2;
+  using VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ExportFenceCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalBufferProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalBufferPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalFenceProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalFencePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphorePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::FormatProperties2;
+  using VULKAN_HPP_NAMESPACE::FormatProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageFormatProperties2;
+  using VULKAN_HPP_NAMESPACE::ImageFormatProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2;
+  using VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo;
+  using VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2;
+  using VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference;
+  using VULKAN_HPP_NAMESPACE::InputAttachmentAspectReferenceKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo;
+  using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements;
+  using VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirementsKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryRequirements2;
+  using VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceIDPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3PropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParameterFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointerFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointerFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyProperties2;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo;
+  using VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo;
+  using VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2;
+  using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2;
+  using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2KHR;
+
+  //=== VK_VERSION_1_2 ===
+  using VULKAN_HPP_NAMESPACE::AttachmentDescription2;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayoutKHR;
+  using VULKAN_HPP_NAMESPACE::AttachmentReference2;
+  using VULKAN_HPP_NAMESPACE::AttachmentReference2KHR;
+  using VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout;
+  using VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayoutKHR;
+  using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo;
+  using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoEXT;
+  using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR;
+  using VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo;
+  using VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ConformanceVersion;
+  using VULKAN_HPP_NAMESPACE::ConformanceVersionKHR;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupportEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR;
+  using VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo;
+  using VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR;
+  using VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo;
+  using VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloat16Int8FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphorePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo;
+  using VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo;
+  using VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo;
+  using VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo;
+  using VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo;
+  using VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SubpassBeginInfo;
+  using VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SubpassDependency2;
+  using VULKAN_HPP_NAMESPACE::SubpassDependency2KHR;
+  using VULKAN_HPP_NAMESPACE::SubpassDescription2;
+  using VULKAN_HPP_NAMESPACE::SubpassDescription2KHR;
+  using VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve;
+  using VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolveKHR;
+  using VULKAN_HPP_NAMESPACE::SubpassEndInfo;
+  using VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR;
+  using VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfoKHR;
+
+  //=== VK_VERSION_1_3 ===
+  using VULKAN_HPP_NAMESPACE::BlitImageInfo2;
+  using VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::BufferCopy2;
+  using VULKAN_HPP_NAMESPACE::BufferCopy2KHR;
+  using VULKAN_HPP_NAMESPACE::BufferImageCopy2;
+  using VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR;
+  using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2;
+  using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR;
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo;
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CopyBufferInfo2;
+  using VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2;
+  using VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::CopyImageInfo2;
+  using VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2;
+  using VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::DependencyInfo;
+  using VULKAN_HPP_NAMESPACE::DependencyInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements;
+  using VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirementsKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements;
+  using VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirementsKHR;
+  using VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::FormatProperties3;
+  using VULKAN_HPP_NAMESPACE::FormatProperties3KHR;
+  using VULKAN_HPP_NAMESPACE::ImageBlit2;
+  using VULKAN_HPP_NAMESPACE::ImageBlit2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageCopy2;
+  using VULKAN_HPP_NAMESPACE::ImageCopy2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2;
+  using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageResolve2;
+  using VULKAN_HPP_NAMESPACE::ImageResolve2KHR;
+  using VULKAN_HPP_NAMESPACE::MemoryBarrier2;
+  using VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4PropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedback;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo;
+  using VULKAN_HPP_NAMESPACE::RenderingAttachmentInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RenderingInfo;
+  using VULKAN_HPP_NAMESPACE::RenderingInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ResolveImageInfo2;
+  using VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ShaderRequiredSubgroupSizeCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SubmitInfo2;
+  using VULKAN_HPP_NAMESPACE::SubmitInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock;
+  using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT;
+
+  //=== VK_KHR_surface ===
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::SurfaceFormatKHR;
+
+  //=== VK_KHR_swapchain ===
+  using VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR;
+  using VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PresentInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR;
+
+  //=== VK_KHR_display ===
+  using VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR;
+
+  //=== VK_KHR_display_swapchain ===
+  using VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR;
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+  using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+  using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+  using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+  using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+  using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+  using VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT;
+
+  //=== VK_AMD_rasterization_order ===
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD;
+
+  //=== VK_EXT_debug_marker ===
+  using VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT;
+
+  //=== VK_KHR_video_queue ===
+  using VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR;
+
+  //=== VK_KHR_video_decode_queue ===
+  using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR;
+
+  //=== VK_NV_dedicated_allocation ===
+  using VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV;
+
+  //=== VK_EXT_transform_feedback ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT;
+
+  //=== VK_NVX_binary_import ===
+  using VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX;
+  using VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX;
+  using VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX;
+
+  //=== VK_NVX_image_view_handle ===
+  using VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX;
+  using VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoEXT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoEXT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR;
+
+  //=== VK_AMD_texture_gather_bias_lod ===
+  using VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD;
+
+  //=== VK_AMD_shader_info ===
+  using VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD;
+  using VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD;
+
+  //=== VK_KHR_dynamic_rendering ===
+  using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD;
+  using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoNV;
+  using VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX;
+  using VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+  using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_corner_sampled_image ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV;
+
+  //=== VK_NV_external_memory_capabilities ===
+  using VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV;
+
+  //=== VK_NV_external_memory ===
+  using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+  using VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV;
+  using VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_win32_keyed_mutex ===
+  using VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_validation_flags ===
+  using VULKAN_HPP_NAMESPACE::ValidationFlagsEXT;
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+  using VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_astc_decode_mode ===
+  using VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT;
+
+  //=== VK_EXT_pipeline_robustness ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+  using VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+  using VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_keyed_mutex ===
+  using VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+  using VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+  using VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR;
+
+  //=== VK_KHR_push_descriptor ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR;
+
+  //=== VK_EXT_conditional_rendering ===
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT;
+
+  //=== VK_KHR_incremental_present ===
+  using VULKAN_HPP_NAMESPACE::PresentRegionKHR;
+  using VULKAN_HPP_NAMESPACE::PresentRegionsKHR;
+  using VULKAN_HPP_NAMESPACE::RectLayerKHR;
+
+  //=== VK_NV_clip_space_w_scaling ===
+  using VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::ViewportWScalingNV;
+
+  //=== VK_EXT_display_surface_counter ===
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT;
+
+  //=== VK_EXT_display_control ===
+  using VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT;
+
+  //=== VK_GOOGLE_display_timing ===
+  using VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE;
+  using VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE;
+  using VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE;
+  using VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE;
+
+  //=== VK_NVX_multiview_per_view_attributes ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+  //=== VK_NV_viewport_swizzle ===
+  using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::ViewportSwizzleNV;
+
+  //=== VK_EXT_discard_rectangles ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT;
+
+  //=== VK_EXT_conservative_rasterization ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT;
+
+  //=== VK_EXT_depth_clip_enable ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT;
+
+  //=== VK_EXT_hdr_metadata ===
+  using VULKAN_HPP_NAMESPACE::HdrMetadataEXT;
+  using VULKAN_HPP_NAMESPACE::XYColorEXT;
+
+  //=== VK_KHR_shared_presentable_image ===
+  using VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+  using VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+  using VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR;
+
+  //=== VK_KHR_performance_query ===
+  using VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR;
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR;
+  using VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR;
+
+  //=== VK_KHR_get_display_properties2 ===
+  using VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::DisplayProperties2KHR;
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+  using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+  using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+  using VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+  using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID;
+  using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID;
+  using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID;
+  using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID;
+  using VULKAN_HPP_NAMESPACE::ExternalFormatANDROID;
+  using VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID;
+  using VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+  using VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT;
+  using VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SampleLocationEXT;
+  using VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT;
+
+  //=== VK_EXT_blend_operation_advanced ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT;
+
+  //=== VK_NV_fragment_coverage_to_color ===
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV;
+
+  //=== VK_KHR_acceleration_structure ===
+  using VULKAN_HPP_NAMESPACE::AabbPositionsKHR;
+  using VULKAN_HPP_NAMESPACE::AabbPositionsNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::TransformMatrixKHR;
+  using VULKAN_HPP_NAMESPACE::TransformMatrixNV;
+  using VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR;
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR;
+  using VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR;
+
+  //=== VK_KHR_ray_query ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR;
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV;
+
+  //=== VK_NV_shader_sm_builtins ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+  using VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT;
+  using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT;
+  using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT;
+  using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT;
+
+  //=== VK_EXT_validation_cache ===
+  using VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_portability_subset ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_shading_rate_image ===
+  using VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV;
+  using VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV;
+
+  //=== VK_NV_ray_tracing ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV;
+  using VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV;
+  using VULKAN_HPP_NAMESPACE::GeometryAABBNV;
+  using VULKAN_HPP_NAMESPACE::GeometryDataNV;
+  using VULKAN_HPP_NAMESPACE::GeometryNV;
+  using VULKAN_HPP_NAMESPACE::GeometryTrianglesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV;
+
+  //=== VK_NV_representative_fragment_test ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+  //=== VK_EXT_filter_cubic ===
+  using VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT;
+
+  //=== VK_EXT_external_memory_host ===
+  using VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+  //=== VK_KHR_shader_clock ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR;
+
+  //=== VK_AMD_pipeline_compiler_control ===
+  using VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD;
+
+  //=== VK_EXT_calibrated_timestamps ===
+  using VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT;
+
+  //=== VK_AMD_shader_core_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD;
+
+  //=== VK_KHR_video_decode_h265 ===
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR;
+
+  //=== VK_KHR_global_priority ===
+  using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR;
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD;
+
+  //=== VK_EXT_vertex_attribute_divisor ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_frame_token ===
+  using VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_compute_shader_derivatives ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+  //=== VK_NV_mesh_shader ===
+  using VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV;
+
+  //=== VK_NV_shader_image_footprint ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV;
+
+  //=== VK_NV_scissor_exclusive ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV;
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+  using VULKAN_HPP_NAMESPACE::CheckpointDataNV;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV;
+
+  //=== VK_INTEL_shader_integer_functions2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+  //=== VK_INTEL_performance_query ===
+  using VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceValueINTEL;
+  using VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL;
+
+  //=== VK_EXT_pci_bus_info ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT;
+
+  //=== VK_AMD_display_native_hdr ===
+  using VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD;
+  using VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+  using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+  using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_fragment_density_map ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT;
+
+  //=== VK_KHR_fragment_shading_rate ===
+  using VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR;
+
+  //=== VK_AMD_shader_core_properties2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD;
+
+  //=== VK_AMD_device_coherent_memory ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD;
+
+  //=== VK_EXT_shader_image_atomic_int64 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+  //=== VK_EXT_memory_budget ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT;
+
+  //=== VK_EXT_memory_priority ===
+  using VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT;
+
+  //=== VK_KHR_surface_protected_capabilities ===
+  using VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR;
+
+  //=== VK_NV_dedicated_allocation_image_aliasing ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+  //=== VK_EXT_buffer_device_address ===
+  using VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferAddressFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+  //=== VK_EXT_validation_features ===
+  using VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT;
+
+  //=== VK_KHR_present_wait ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR;
+
+  //=== VK_NV_cooperative_matrix ===
+  using VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV;
+
+  //=== VK_NV_coverage_reduction_mode ===
+  using VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV;
+
+  //=== VK_EXT_fragment_shader_interlock ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+  //=== VK_EXT_ycbcr_image_arrays ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+  //=== VK_EXT_provoking_vertex ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT;
+  using VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+  using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT;
+
+  //=== VK_EXT_line_rasterization ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT;
+
+  //=== VK_EXT_shader_atomic_float ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+  //=== VK_EXT_index_type_uint8 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+  //=== VK_EXT_extended_dynamic_state ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+  //=== VK_KHR_pipeline_executable_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineInfoKHR;
+
+  //=== VK_KHR_map_memory2 ===
+  using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR;
+
+  //=== VK_EXT_shader_atomic_float2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+  //=== VK_EXT_surface_maintenance1 ===
+  using VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT;
+  using VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT;
+  using VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT;
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT;
+
+  //=== VK_NV_device_generated_commands ===
+  using VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV;
+  using VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV;
+  using VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV;
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV;
+
+  //=== VK_NV_inherited_viewport_scissor ===
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+  //=== VK_EXT_texel_buffer_alignment ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+  //=== VK_QCOM_render_pass_transform ===
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM;
+  using VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM;
+
+  //=== VK_EXT_depth_bias_control ===
+  using VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT;
+
+  //=== VK_EXT_device_memory_report ===
+  using VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT;
+
+  //=== VK_EXT_robustness2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT;
+
+  //=== VK_EXT_custom_border_color ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT;
+
+  //=== VK_KHR_pipeline_library ===
+  using VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR;
+
+  //=== VK_NV_present_barrier ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV;
+
+  //=== VK_KHR_present_id ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PresentIdKHR;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR;
+  using VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+  using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV;
+
+  //=== VK_NV_low_latency ===
+  using VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV;
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+  using VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+  using VULKAN_HPP_NAMESPACE::CheckpointData2NV;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV;
+
+  //=== VK_EXT_descriptor_buffer ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT;
+  using VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorDataEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT;
+  using VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT;
+
+  //=== VK_EXT_graphics_pipeline_library ===
+  using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+
+  //=== VK_AMD_shader_early_and_late_fragment_tests ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+  //=== VK_KHR_fragment_shader_barycentric ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+  //=== VK_KHR_shader_subgroup_uniform_control_flow ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV;
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::SRTDataNV;
+
+  //=== VK_EXT_mesh_shader ===
+  using VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT;
+
+  //=== VK_EXT_ycbcr_2plane_444_formats ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+
+  //=== VK_EXT_fragment_density_map2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+  //=== VK_QCOM_rotated_copy_commands ===
+  using VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM;
+
+  //=== VK_KHR_workgroup_memory_explicit_layout ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+
+  //=== VK_EXT_image_compression_control ===
+  using VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT;
+  using VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT;
+
+  //=== VK_EXT_attachment_feedback_loop_layout ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+  //=== VK_EXT_4444_formats ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT;
+
+  //=== VK_EXT_device_fault ===
+  using VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT;
+
+  //=== VK_EXT_rgba10x6_formats ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+  using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT;
+  using VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT;
+
+  //=== VK_EXT_physical_device_drm ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT;
+
+  //=== VK_EXT_device_address_binding_report ===
+  using VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT;
+
+  //=== VK_EXT_depth_clip_control ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT;
+
+  //=== VK_EXT_primitive_topology_list_restart ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+  using VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+  using VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  using VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI;
+  using VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI;
+
+  //=== VK_HUAWEI_invocation_mask ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI;
+
+  //=== VK_NV_external_memory_rdma ===
+  using VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV;
+
+  //=== VK_EXT_pipeline_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT;
+
+  //=== VK_EXT_multisampled_render_to_single_sampled ===
+  using VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT;
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+  using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT;
+
+  //=== VK_EXT_primitives_generated_query ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR;
+
+  //=== VK_EXT_image_view_min_lod ===
+  using VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT;
+
+  //=== VK_EXT_multi_draw ===
+  using VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT;
+
+  //=== VK_EXT_image_2d_view_of_3d ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+  //=== VK_EXT_shader_tile_image ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT;
+
+  //=== VK_EXT_opacity_micromap ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT;
+  using VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT;
+  using VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT;
+  using VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapTriangleEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapUsageEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_NV_displacement_micromap ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+
+  //=== VK_EXT_border_color_swizzle ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT;
+
+  //=== VK_EXT_pageable_device_local_memory ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+
+  //=== VK_ARM_shader_core_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM;
+
+  //=== VK_EXT_image_sliced_view_of_3d ===
+  using VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+  using VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+  //=== VK_EXT_depth_clamp_zero_one ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+  //=== VK_EXT_non_seamless_cube_map ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+  //=== VK_QCOM_fragment_density_map_offset ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+  using VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM;
+
+  //=== VK_NV_copy_memory_indirect ===
+  using VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV;
+
+  //=== VK_NV_memory_decompression ===
+  using VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV;
+
+  //=== VK_NV_linear_color_attachment ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV;
+
+  //=== VK_EXT_image_compression_control_swapchain ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+  //=== VK_QCOM_image_processing ===
+  using VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM;
+
+  //=== VK_EXT_external_memory_acquire_unmodified ===
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT;
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+  using VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT;
+  using VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+  //=== VK_EXT_subpass_merge_feedback ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT;
+
+  //=== VK_LUNARG_direct_driver_loading ===
+  using VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG;
+  using VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG;
+
+  //=== VK_EXT_shader_module_identifier ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT;
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+  //=== VK_NV_optical_flow ===
+  using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV;
+
+  //=== VK_EXT_legacy_dithering ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT;
+
+  //=== VK_EXT_pipeline_protected_access ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+  //=== VK_KHR_ray_tracing_position_fetch ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
+
+  //=== VK_EXT_shader_object ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT;
+
+  //=== VK_QCOM_tile_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM;
+  using VULKAN_HPP_NAMESPACE::TilePropertiesQCOM;
+
+  //===
VK_SEC_amigo_profiling === + using VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC; + + //=== VK_QCOM_multiview_per_view_viewports === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + //=== VK_NV_ray_tracing_invocation_reorder === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV; + + //=== VK_EXT_mutable_descriptor_type === + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE; + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT; + using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + + //=== VK_ARM_shader_core_builtins === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + //=== VK_EXT_pipeline_library_group_handles === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + //=== VK_EXT_dynamic_rendering_unused_attachments === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + //=== VK_KHR_cooperative_matrix === + using VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR; + + //=== VK_QCOM_multiview_per_view_render_areas === + using VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + using VULKAN_HPP_NAMESPACE::ExternalFormatQNX; + using VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + using VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX; + using VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=============== + //=== HANDLEs === + //=============== + + using VULKAN_HPP_NAMESPACE::isVulkanHandleType; + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::Buffer; + using VULKAN_HPP_NAMESPACE::BufferView; + using VULKAN_HPP_NAMESPACE::CommandBuffer; + using VULKAN_HPP_NAMESPACE::CommandPool; + using VULKAN_HPP_NAMESPACE::DescriptorPool; + using VULKAN_HPP_NAMESPACE::DescriptorSet; + using VULKAN_HPP_NAMESPACE::DescriptorSetLayout; + using VULKAN_HPP_NAMESPACE::Device; + using VULKAN_HPP_NAMESPACE::DeviceMemory; + using VULKAN_HPP_NAMESPACE::Event; + using VULKAN_HPP_NAMESPACE::Fence; + using VULKAN_HPP_NAMESPACE::Framebuffer; + using VULKAN_HPP_NAMESPACE::Image; + using VULKAN_HPP_NAMESPACE::ImageView; + using VULKAN_HPP_NAMESPACE::Instance; + using VULKAN_HPP_NAMESPACE::PhysicalDevice; + using VULKAN_HPP_NAMESPACE::Pipeline; + using VULKAN_HPP_NAMESPACE::PipelineCache; + using VULKAN_HPP_NAMESPACE::PipelineLayout; + using VULKAN_HPP_NAMESPACE::QueryPool; + using VULKAN_HPP_NAMESPACE::Queue; + using 
VULKAN_HPP_NAMESPACE::RenderPass; + using VULKAN_HPP_NAMESPACE::Sampler; + using VULKAN_HPP_NAMESPACE::Semaphore; + using VULKAN_HPP_NAMESPACE::ShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; + using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_NAMESPACE::PrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::SurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::SwapchainKHR; + + //=== VK_KHR_display === + using VULKAN_HPP_NAMESPACE::DisplayKHR; + using VULKAN_HPP_NAMESPACE::DisplayModeKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::VideoSessionKHR; + using VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::CuFunctionNVX; + using VULKAN_HPP_NAMESPACE::CuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::MicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::ShaderEXT; + + //====================== + //=== UNIQUE HANDLEs === + //====================== + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::UniqueBuffer; + using VULKAN_HPP_NAMESPACE::UniqueBufferView; + using VULKAN_HPP_NAMESPACE::UniqueCommandBuffer; + using VULKAN_HPP_NAMESPACE::UniqueCommandPool; + using VULKAN_HPP_NAMESPACE::UniqueDescriptorPool; + using VULKAN_HPP_NAMESPACE::UniqueDescriptorSet; + using VULKAN_HPP_NAMESPACE::UniqueDescriptorSetLayout; + using VULKAN_HPP_NAMESPACE::UniqueDevice; + using VULKAN_HPP_NAMESPACE::UniqueDeviceMemory; + using VULKAN_HPP_NAMESPACE::UniqueEvent; + using VULKAN_HPP_NAMESPACE::UniqueFence; + using VULKAN_HPP_NAMESPACE::UniqueFramebuffer; + using VULKAN_HPP_NAMESPACE::UniqueImage; + using VULKAN_HPP_NAMESPACE::UniqueImageView; + using VULKAN_HPP_NAMESPACE::UniqueInstance; + using VULKAN_HPP_NAMESPACE::UniquePipeline; + using VULKAN_HPP_NAMESPACE::UniquePipelineCache; + using VULKAN_HPP_NAMESPACE::UniquePipelineLayout; + using VULKAN_HPP_NAMESPACE::UniqueQueryPool; + using VULKAN_HPP_NAMESPACE::UniqueRenderPass; + using VULKAN_HPP_NAMESPACE::UniqueSampler; + using VULKAN_HPP_NAMESPACE::UniqueSemaphore; + using VULKAN_HPP_NAMESPACE::UniqueShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::UniqueDescriptorUpdateTemplate; + using VULKAN_HPP_NAMESPACE::UniqueSamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using 
VULKAN_HPP_NAMESPACE::UniquePrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_NAMESPACE::UniqueSurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_NAMESPACE::UniqueSwapchainKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::UniqueDebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_NAMESPACE::UniqueVideoSessionKHR; + using VULKAN_HPP_NAMESPACE::UniqueVideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_NAMESPACE::UniqueCuFunctionNVX; + using VULKAN_HPP_NAMESPACE::UniqueCuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::UniqueDebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_NAMESPACE::UniqueValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureNV; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_NAMESPACE::UniqueDeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_NAMESPACE::UniqueIndirectCommandsLayoutNV; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_NAMESPACE::UniqueBufferCollectionFUCHSIA; +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_NAMESPACE::UniqueMicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_NAMESPACE::UniqueOpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_NAMESPACE::UniqueHandleTraits; + using VULKAN_HPP_NAMESPACE::UniqueShaderEXT; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + + //=========================== + //=== COMMAND Definitions === + //=========================== + using VULKAN_HPP_NAMESPACE::createInstance; + using VULKAN_HPP_NAMESPACE::enumerateInstanceExtensionProperties; + using VULKAN_HPP_NAMESPACE::enumerateInstanceLayerProperties; + using VULKAN_HPP_NAMESPACE::enumerateInstanceVersion; + +#if !defined( VULKAN_HPP_NO_SMART_HANDLE ) + using VULKAN_HPP_NAMESPACE::createInstanceUnique; +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + using VULKAN_HPP_NAMESPACE::StructExtends; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#if defined( VULKAN_HPP_DYNAMIC_LOADER_TOOL ) + using VULKAN_HPP_NAMESPACE::DynamicLoader; +#endif /*VULKAN_HPP_DYNAMIC_LOADER_TOOL*/ + + using VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic; + + //===================== + //=== Format Traits === + //===================== + using VULKAN_HPP_NAMESPACE::blockExtent; + using VULKAN_HPP_NAMESPACE::blockSize; + using VULKAN_HPP_NAMESPACE::compatibilityClass; + using VULKAN_HPP_NAMESPACE::componentBits; + using VULKAN_HPP_NAMESPACE::componentCount; + using VULKAN_HPP_NAMESPACE::componentName; + using VULKAN_HPP_NAMESPACE::componentNumericFormat; + using VULKAN_HPP_NAMESPACE::componentPlaneIndex; + using VULKAN_HPP_NAMESPACE::componentsAreCompressed; + using VULKAN_HPP_NAMESPACE::compressionScheme; + using VULKAN_HPP_NAMESPACE::isCompressed; + using VULKAN_HPP_NAMESPACE::packed; + using VULKAN_HPP_NAMESPACE::planeCompatibleFormat; + using VULKAN_HPP_NAMESPACE::planeCount; + using VULKAN_HPP_NAMESPACE::planeHeightDivisor; + using VULKAN_HPP_NAMESPACE::planeWidthDivisor; + using VULKAN_HPP_NAMESPACE::texelsPerBlock; + + //====================================== + //=== Extension inspection functions === + //====================================== + using 
VULKAN_HPP_NAMESPACE::getDeprecatedExtensions; + using VULKAN_HPP_NAMESPACE::getDeviceExtensions; + using VULKAN_HPP_NAMESPACE::getExtensionDepends; + using VULKAN_HPP_NAMESPACE::getExtensionDeprecatedBy; + using VULKAN_HPP_NAMESPACE::getExtensionObsoletedBy; + using VULKAN_HPP_NAMESPACE::getExtensionPromotedTo; + using VULKAN_HPP_NAMESPACE::getInstanceExtensions; + using VULKAN_HPP_NAMESPACE::getObsoletedExtensions; + using VULKAN_HPP_NAMESPACE::getPromotedExtensions; + using VULKAN_HPP_NAMESPACE::isDeprecatedExtension; + using VULKAN_HPP_NAMESPACE::isDeviceExtension; + using VULKAN_HPP_NAMESPACE::isInstanceExtension; + using VULKAN_HPP_NAMESPACE::isObsoletedExtension; + using VULKAN_HPP_NAMESPACE::isPromotedExtension; + + export namespace VULKAN_HPP_RAII_NAMESPACE + { + //====================== + //=== RAII HARDCODED === + //====================== + + using VULKAN_HPP_RAII_NAMESPACE::Context; + using VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::exchange; + using VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher; + + //==================== + //=== RAII HANDLEs === + //==================== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_RAII_NAMESPACE::Buffer; + using VULKAN_HPP_RAII_NAMESPACE::BufferView; + using VULKAN_HPP_RAII_NAMESPACE::CommandBuffer; + using VULKAN_HPP_RAII_NAMESPACE::CommandBuffers; + using VULKAN_HPP_RAII_NAMESPACE::CommandPool; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorPool; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorSet; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout; + using VULKAN_HPP_RAII_NAMESPACE::DescriptorSets; + using VULKAN_HPP_RAII_NAMESPACE::Device; + using VULKAN_HPP_RAII_NAMESPACE::DeviceMemory; + using VULKAN_HPP_RAII_NAMESPACE::Event; + using VULKAN_HPP_RAII_NAMESPACE::Fence; + using VULKAN_HPP_RAII_NAMESPACE::Framebuffer; + using VULKAN_HPP_RAII_NAMESPACE::Image; + using VULKAN_HPP_RAII_NAMESPACE::ImageView; + using VULKAN_HPP_RAII_NAMESPACE::Instance; + using VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice; + using VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices; + using VULKAN_HPP_RAII_NAMESPACE::Pipeline; + using VULKAN_HPP_RAII_NAMESPACE::PipelineCache; + using VULKAN_HPP_RAII_NAMESPACE::PipelineLayout; + using VULKAN_HPP_RAII_NAMESPACE::Pipelines; + using VULKAN_HPP_RAII_NAMESPACE::QueryPool; + using VULKAN_HPP_RAII_NAMESPACE::Queue; + using VULKAN_HPP_RAII_NAMESPACE::RenderPass; + using VULKAN_HPP_RAII_NAMESPACE::Sampler; + using VULKAN_HPP_RAII_NAMESPACE::Semaphore; + using VULKAN_HPP_RAII_NAMESPACE::ShaderModule; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate; + using VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion; + + //=== VK_VERSION_1_3 === + using VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot; + + //=== VK_KHR_surface === + using VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR; + + //=== VK_KHR_swapchain === + using VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR; + using VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs; + + //=== VK_KHR_display === + using VULKAN_HPP_RAII_NAMESPACE::DisplayKHR; + using VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs; + using VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT; + + //=== VK_KHR_video_queue === + using VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR; + using VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR; + + //=== VK_NVX_binary_import === + using VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX; + using 
VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT; + + //=== VK_KHR_acceleration_structure === + using VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR; + + //=== VK_EXT_validation_cache === + using VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV; + + //=== VK_INTEL_performance_query === + using VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL; + + //=== VK_KHR_deferred_host_operations === + using VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR; + + //=== VK_NV_device_generated_commands === + using VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV; + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + using VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + using VULKAN_HPP_RAII_NAMESPACE::MicromapEXT; + + //=== VK_NV_optical_flow === + using VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV; + + //=== VK_EXT_shader_object === + using VULKAN_HPP_RAII_NAMESPACE::ShaderEXT; + using VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs; + + } // namespace VULKAN_HPP_RAII_NAMESPACE +} // namespace VULKAN_HPP_NAMESPACE diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp index c58b700..fa021e2 100644 --- a/include/vulkan/vulkan.hpp +++ b/include/vulkan/vulkan.hpp @@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 255, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 256, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION @@ -6021,7 +6021,7 @@ namespace VULKAN_HPP_NAMESPACE } extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; # else - static inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic() + inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic() { static ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic dls; return dls; @@ -6634,9 +6634,9 @@ namespace VULKAN_HPP_NAMESPACE IncompatibleShaderBinaryEXTError( char const * message ) : SystemError( make_error_code( Result::eErrorIncompatibleShaderBinaryEXT ), message ) {} }; - namespace + namespace detail { - [[noreturn]] void throwResultException( Result result, char const * message ) + [[noreturn]] VULKAN_HPP_INLINE void throwResultException( Result result, char const * message ) { switch ( result ) { @@ -6682,7 +6682,7 @@ namespace VULKAN_HPP_NAMESPACE default: throw SystemError( make_error_code( result ), message ); } } - } // namespace + } // namespace detail #endif template @@ -6734,7 +6734,14 @@ namespace VULKAN_HPP_NAMESPACE { } - std::tuple> asTuple() + VULKAN_HPP_DEPRECATED( + "asTuple() on an l-value is deprecated, as it implicitly moves the UniqueHandle out of the ResultValue. Use asTuple() on an r-value instead, requiring to explicitly move the UniqueHandle." 
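// A minimal consumer-side sketch for the new vulkan.cppm module interface, assuming a
// C++20 toolchain with module support and that the interface unit is built under its
// default module name (build-system wiring is an assumption, not part of this patch):

import vulkan;

int componentCountOfRgba8()
{
  // The names exported above (handles, format traits, RAII wrappers, ...) are reached
  // through the usual vk:: namespace.
  return static_cast<int>( vk::componentCount( vk::Format::eR8G8B8A8Unorm ) );  // 4
}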
) + std::tuple> asTuple() & + { + return std::make_tuple( result, std::move( value ) ); + } + + std::tuple> asTuple() && { return std::make_tuple( result, std::move( value ) ); } @@ -6756,7 +6763,14 @@ namespace VULKAN_HPP_NAMESPACE { } - std::tuple>> asTuple() + VULKAN_HPP_DEPRECATED( + "asTuple() on an l-value is deprecated, as it implicitly moves the UniqueHandle out of the ResultValue. Use asTuple() on an r-value instead, requiring to explicitly move the UniqueHandle." ) + std::tuple>> asTuple() & + { + return std::make_tuple( result, std::move( value ) ); + } + + std::tuple>> asTuple() && { return std::make_tuple( result, std::move( value ) ); } @@ -6826,7 +6840,7 @@ namespace VULKAN_HPP_NAMESPACE #else if ( result != Result::eSuccess ) { - throwResultException( result, message ); + detail::throwResultException( result, message ); } #endif } @@ -6841,10 +6855,96 @@ namespace VULKAN_HPP_NAMESPACE #else if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) { - throwResultException( result, message ); + detail::throwResultException( result, message ); } #endif } + + //========================================= + //=== CONSTEXPR CONSTANTs AND FUNCTIONs === + //========================================= + VULKAN_HPP_CONSTEXPR uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED; + VULKAN_HPP_CONSTEXPR uint32_t False = VK_FALSE; + VULKAN_HPP_CONSTEXPR float LodClampNone = VK_LOD_CLAMP_NONE; + VULKAN_HPP_CONSTEXPR uint32_t LuidSize = VK_LUID_SIZE; + VULKAN_HPP_CONSTEXPR uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE; + VULKAN_HPP_CONSTEXPR uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE; + VULKAN_HPP_CONSTEXPR uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE; + VULKAN_HPP_CONSTEXPR uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE; + VULKAN_HPP_CONSTEXPR uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE; + VULKAN_HPP_CONSTEXPR uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; + VULKAN_HPP_CONSTEXPR uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS; + VULKAN_HPP_CONSTEXPR uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES; + VULKAN_HPP_CONSTEXPR uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; + VULKAN_HPP_CONSTEXPR uint32_t MaxShaderModuleIdentifierSizeExt = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; + VULKAN_HPP_CONSTEXPR uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL; + VULKAN_HPP_CONSTEXPR uint32_t QueueFamilyForeignExt = VK_QUEUE_FAMILY_FOREIGN_EXT; + VULKAN_HPP_CONSTEXPR uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED; + VULKAN_HPP_CONSTEXPR uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT; + VULKAN_HPP_CONSTEXPR uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS; + VULKAN_HPP_CONSTEXPR uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS; + VULKAN_HPP_CONSTEXPR uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR; + VULKAN_HPP_CONSTEXPR uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL; + VULKAN_HPP_CONSTEXPR uint32_t True = VK_TRUE; + VULKAN_HPP_CONSTEXPR uint32_t UuidSize = VK_UUID_SIZE; + VULKAN_HPP_CONSTEXPR uint64_t WholeSize = VK_WHOLE_SIZE; + VULKAN_HPP_CONSTEXPR uint32_t HeaderVersion = VK_HEADER_VERSION; + template ::value>::type> + VULKAN_HPP_CONSTEXPR uint32_t apiVersionMajor( T const version ) + { + return ( ( ( uint32_t )( version ) >> 22U ) & 0x7FU ); + } + template ::value>::type> + VULKAN_HPP_CONSTEXPR uint32_t apiVersionMinor( T const version ) + { + return ( ( ( uint32_t )( version ) >> 12U ) & 0x3FFU ); + } + template ::value>::type> + 
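// A caller-side sketch for the r-value asTuple() overloads added above (the function
// name and surrounding application code are assumptions): unique pipeline creation
// yields a ResultValue holding a UniqueHandle, since it has more than one success code,
// so the temporary is unpacked directly and the deprecated l-value overload is never
// involved. A named ResultValue now has to be moved explicitly, e.g. std::move( rv ).asTuple().
inline vk::UniquePipeline makePipelineSketch( vk::Device                             device,
                                              vk::PipelineCache                      cache,
                                              vk::GraphicsPipelineCreateInfo const & createInfo )
{
  auto [result, pipeline] = device.createGraphicsPipelineUnique( cache, createInfo ).asTuple();
  if ( result != vk::Result::eSuccess )
  {
    // ePipelineCompileRequired is also a success code; handle it as the application sees fit.
  }
  return std::move( pipeline );
}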
VULKAN_HPP_CONSTEXPR uint32_t apiVersionPatch( T const version ) + { + return ( ( uint32_t )(version)&0xFFFU ); + } + template ::value>::type> + VULKAN_HPP_CONSTEXPR uint32_t apiVersionVariant( T const version ) + { + return ( ( uint32_t )( version ) >> 29U ); + } + template ::value>::type> + VULKAN_HPP_CONSTEXPR uint32_t makeApiVersion( T const variant, T const major, T const minor, T const patch ) + { + return ( ( ( ( uint32_t )( variant ) ) << 29U ) | ( ( ( uint32_t )( major ) ) << 22U ) | ( ( ( uint32_t )( minor ) ) << 12U ) | ( ( uint32_t )( patch ) ) ); + } + template ::value>::type> + VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_MAKE_API_VERSION should be used instead." ) + VULKAN_HPP_CONSTEXPR uint32_t makeVersion( T const major, T const minor, T const patch ) + { + return ( ( ( ( uint32_t )( major ) ) << 22U ) | ( ( ( uint32_t )( minor ) ) << 12U ) | ( ( uint32_t )( patch ) ) ); + } + template ::value>::type> + VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_MAJOR should be used instead." ) + VULKAN_HPP_CONSTEXPR uint32_t versionMajor( T const version ) + { + return ( ( uint32_t )( version ) >> 22U ); + } + template ::value>::type> + VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_MINOR should be used instead." ) + VULKAN_HPP_CONSTEXPR uint32_t versionMinor( T const version ) + { + return ( ( ( uint32_t )( version ) >> 12U ) & 0x3FFU ); + } + template ::value>::type> + VULKAN_HPP_DEPRECATED( "This define is deprecated. VK_API_VERSION_PATCH should be used instead." ) + VULKAN_HPP_CONSTEXPR uint32_t versionPatch( T const version ) + { + return ( ( uint32_t )(version)&0xFFFU ); + } + VULKAN_HPP_CONSTEXPR auto ApiVersion = makeApiVersion( 0, 1, 0, 0 ); + VULKAN_HPP_CONSTEXPR auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 ); + VULKAN_HPP_CONSTEXPR auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 ); + VULKAN_HPP_CONSTEXPR auto ApiVersion12 = makeApiVersion( 0, 1, 2, 0 ); + VULKAN_HPP_CONSTEXPR auto ApiVersion13 = makeApiVersion( 0, 1, 3, 0 ); + VULKAN_HPP_CONSTEXPR auto HeaderVersionComplete = makeApiVersion( 0, 1, 3, VK_HEADER_VERSION ); + } // namespace VULKAN_HPP_NAMESPACE // clang-format off diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h index 6fed7cf..b56786b 100644 --- a/include/vulkan/vulkan_core.h +++ b/include/vulkan/vulkan_core.h @@ -68,7 +68,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 255 +#define VK_HEADER_VERSION 256 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -6565,6 +6565,7 @@ static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0 static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = 0x00080000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI = 0x8000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL; @@ -15128,7 +15129,7 @@ typedef struct 
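// A short sketch of the constexpr version helpers defined above (assumed caller-side
// code): they mirror VK_MAKE_API_VERSION / VK_API_VERSION_*, so packed version numbers
// can be built and decoded without the C macros.
static_assert( vk::apiVersionMajor( vk::ApiVersion13 ) == 1, "major of 1.3 is 1" );
static_assert( vk::apiVersionMinor( vk::ApiVersion13 ) == 3, "minor of 1.3 is 3" );
static_assert( vk::makeApiVersion( 0, 1, 3, 0 ) == VK_API_VERSION_1_3, "same packing as the C macro" );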
VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { #define VK_HUAWEI_subpass_shading 1 -#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 2 +#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 3 #define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading" typedef struct VkSubpassShadingPipelineCreateInfoHUAWEI { VkStructureType sType; diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp index 1ec15f3..9dc2a33 100644 --- a/include/vulkan/vulkan_enums.hpp +++ b/include/vulkan/vulkan_enums.hpp @@ -3755,6 +3755,7 @@ namespace VULKAN_HPP_NAMESPACE eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV, eTaskShaderEXT = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT, eMeshShaderEXT = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT, + eSubpassShaderHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI, eSubpassShadingHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, eInvocationMaskHUAWEI = VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI, eAccelerationStructureCopyKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR, @@ -3786,7 +3787,7 @@ namespace VULKAN_HPP_NAMESPACE | PipelineStageFlagBits2::eTransformFeedbackEXT | PipelineStageFlagBits2::eConditionalRenderingEXT | PipelineStageFlagBits2::eCommandPreprocessNV | PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits2::eAccelerationStructureBuildKHR | PipelineStageFlagBits2::eRayTracingShaderKHR | PipelineStageFlagBits2::eFragmentDensityProcessEXT | PipelineStageFlagBits2::eTaskShaderEXT | - PipelineStageFlagBits2::eMeshShaderEXT | PipelineStageFlagBits2::eSubpassShadingHUAWEI | PipelineStageFlagBits2::eInvocationMaskHUAWEI | + PipelineStageFlagBits2::eMeshShaderEXT | PipelineStageFlagBits2::eSubpassShaderHUAWEI | PipelineStageFlagBits2::eInvocationMaskHUAWEI | PipelineStageFlagBits2::eAccelerationStructureCopyKHR | PipelineStageFlagBits2::eMicromapBuildEXT | PipelineStageFlagBits2::eClusterCullingShaderHUAWEI | PipelineStageFlagBits2::eOpticalFlowNV; }; diff --git a/include/vulkan/vulkan_format_traits.hpp b/include/vulkan/vulkan_format_traits.hpp index 7cac863..7d72bbe 100644 --- a/include/vulkan/vulkan_format_traits.hpp +++ b/include/vulkan/vulkan_format_traits.hpp @@ -3164,21 +3164,21 @@ namespace VULKAN_HPP_NAMESPACE switch ( component ) { case 0: return "R"; - case 1: return "B"; + case 1: return "G"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: switch ( component ) { case 0: return "R"; - case 1: return "B"; + case 1: return "G"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: switch ( component ) { case 0: return "R"; - case 1: return "B"; + case 1: return "G"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: @@ -5392,7 +5392,7 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: switch ( component ) { - case 0: return "SRGB"; + case 0: return "SNORM"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: @@ -5405,8 +5405,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: switch ( component ) { - case 0: return "SRGB"; - case 1: return "SRGB"; + case 0: return "SNORM"; + case 1: return "SNORM"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp index 79e9a63..9c1939c 100644 --- 
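// A small caller-side sketch of the format-traits lookups corrected above (the function
// name is illustrative): the green channel of the two-component R64G64 formats is now
// reported as "G" rather than "B", and the signed BC4/BC5 blocks as "SNORM" rather than
// "SRGB".
inline void formatTraitsSketch()
{
  char const * green   = vk::componentName( vk::Format::eR64G64Sfloat, 1 );           // "G"
  char const * numeric = vk::componentNumericFormat( vk::Format::eBc5SnormBlock, 0 );  // "SNORM"
  (void)green;
  (void)numeric;
}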
a/include/vulkan/vulkan_raii.hpp +++ b/include/vulkan/vulkan_raii.hpp @@ -2644,7 +2644,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_instance ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateInstance" ); + detail::throwResultException( result, "vkCreateInstance" ); } m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, static_cast( m_instance ) ) ); @@ -2679,10 +2679,9 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher.reset( rhs.m_dispatcher.release() ); + std::swap( m_instance, rhs.m_instance ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -2922,8 +2921,8 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -3328,7 +3327,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkEnumeratePhysicalDevices" ); + detail::throwResultException( result, "vkEnumeratePhysicalDevices" ); } } @@ -3363,7 +3362,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_device ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDevice" ); + detail::throwResultException( result, "vkCreateDevice" ); } m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, static_cast( m_device ) ) ); @@ -3398,10 +3397,9 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher.reset( rhs.m_dispatcher.release() ); + std::swap( m_device, rhs.m_device ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4276,7 +4274,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_accelerationStructure ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateAccelerationStructureKHR" ); + detail::throwResultException( result, "vkCreateAccelerationStructureKHR" ); } } @@ -4311,11 +4309,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_accelerationStructure = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructure, rhs.m_accelerationStructure ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return 
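// The move assignments above now use the plain swap idiom: the left-hand object's
// current handles migrate into rhs, and rhs's destructor releases them, so the explicit
// clear()-then-exchange sequence is no longer needed. A minimal sketch of the pattern
// with a hypothetical wrapper (names and members are illustrative, not from this patch;
// <utility> is assumed to be available for std::swap):
class SwapAssignedSketch
{
public:
  SwapAssignedSketch & operator=( SwapAssignedSketch && rhs ) noexcept
  {
    if ( this != &rhs )
    {
      std::swap( m_handle, rhs.m_handle );  // rhs now holds the old handle; its destructor
    }                                       // (not shown here) is what would release it
    return *this;
  }

private:
  void * m_handle = nullptr;  // placeholder standing in for a real Vulkan handle
};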
*this; } @@ -4397,7 +4394,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_accelerationStructure ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateAccelerationStructureNV" ); + detail::throwResultException( result, "vkCreateAccelerationStructureNV" ); } } @@ -4432,11 +4429,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_accelerationStructure = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_accelerationStructure, rhs.m_accelerationStructure ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4526,7 +4522,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_buffer ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateBuffer" ); + detail::throwResultException( result, "vkCreateBuffer" ); } } @@ -4561,11 +4557,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_buffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_buffer, rhs.m_buffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4653,7 +4648,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_collection ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateBufferCollectionFUCHSIA" ); + detail::throwResultException( result, "vkCreateBufferCollectionFUCHSIA" ); } } @@ -4688,11 +4683,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_collection = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_collection, rhs.m_collection ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4783,7 +4777,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_bufferView ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateBufferView" ); + detail::throwResultException( result, "vkCreateBufferView" ); } } @@ -4818,11 +4812,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_bufferView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ); - m_allocator = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_bufferView, rhs.m_bufferView ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -4903,7 +4896,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_commandPool ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateCommandPool" ); + detail::throwResultException( result, "vkCreateCommandPool" ); } } @@ -4938,11 +4931,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -5047,11 +5039,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_commandPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ); - m_commandBuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_commandPool, rhs.m_commandPool ); + std::swap( m_commandBuffer, rhs.m_commandBuffer ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6009,7 +6000,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkAllocateCommandBuffers" ); + detail::throwResultException( result, "vkAllocateCommandBuffers" ); } } @@ -6046,7 +6037,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_function ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateCuFunctionNVX" ); + detail::throwResultException( result, "vkCreateCuFunctionNVX" ); } } @@ -6081,11 +6072,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_function = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_function, rhs.m_function ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6166,7 +6156,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_module ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateCuModuleNVX" ); + detail::throwResultException( result, "vkCreateCuModuleNVX" ); } } @@ -6201,11 +6191,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - 
clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_module = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_module, rhs.m_module ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6286,7 +6275,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_callback ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDebugReportCallbackEXT" ); + detail::throwResultException( result, "vkCreateDebugReportCallbackEXT" ); } } @@ -6321,11 +6310,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); - m_callback = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_instance, rhs.m_instance ); + std::swap( m_callback, rhs.m_callback ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6407,7 +6395,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_messenger ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDebugUtilsMessengerEXT" ); + detail::throwResultException( result, "vkCreateDebugUtilsMessengerEXT" ); } } @@ -6442,11 +6430,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); - m_messenger = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_instance, rhs.m_instance ); + std::swap( m_messenger, rhs.m_messenger ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6526,7 +6513,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_operation ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDeferredOperationKHR" ); + detail::throwResultException( result, "vkCreateDeferredOperationKHR" ); } } @@ -6561,11 +6548,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_operation = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_operation, rhs.m_operation ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6655,7 +6641,7 @@ namespace 
VULKAN_HPP_NAMESPACE reinterpret_cast( &m_descriptorPool ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDescriptorPool" ); + detail::throwResultException( result, "vkCreateDescriptorPool" ); } } @@ -6690,11 +6676,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6792,11 +6777,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_descriptorPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ); - m_descriptorSet = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorPool, rhs.m_descriptorPool ); + std::swap( m_descriptorSet, rhs.m_descriptorSet ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -6889,7 +6873,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkAllocateDescriptorSets" ); + detail::throwResultException( result, "vkAllocateDescriptorSets" ); } } @@ -6926,7 +6910,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_descriptorSetLayout ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDescriptorSetLayout" ); + detail::throwResultException( result, "vkCreateDescriptorSetLayout" ); } } @@ -6961,11 +6945,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_descriptorSetLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7053,7 +7036,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_descriptorUpdateTemplate ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDescriptorUpdateTemplate" ); + detail::throwResultException( result, "vkCreateDescriptorUpdateTemplate" ); } } @@ -7088,11 +7071,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_descriptorUpdateTemplate = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ); - m_allocator = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7174,7 +7156,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_memory ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkAllocateMemory" ); + detail::throwResultException( result, "vkAllocateMemory" ); } } @@ -7209,11 +7191,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_memory = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_memory, rhs.m_memory ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7307,7 +7288,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( *physicalDevice ), drmFd, connectorId, reinterpret_cast( &m_display ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkGetDrmDisplayEXT" ); + detail::throwResultException( result, "vkGetDrmDisplayEXT" ); } } @@ -7319,7 +7300,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( *physicalDevice ), &dpy, rrOutput, reinterpret_cast( &m_display ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkGetRandROutputDisplayEXT" ); + detail::throwResultException( result, "vkGetRandROutputDisplayEXT" ); } } # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ @@ -7332,7 +7313,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( *physicalDevice ), deviceRelativeId, reinterpret_cast( &m_display ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkGetWinrtDisplayNV" ); + detail::throwResultException( result, "vkGetWinrtDisplayNV" ); } } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -7362,10 +7343,9 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); - m_display = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_display, rhs.m_display ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7466,7 +7446,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkGetDisplayPlaneSupportedDisplaysKHR" ); + detail::throwResultException( result, "vkGetDisplayPlaneSupportedDisplaysKHR" ); } } @@ -7502,7 +7482,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_displayModeKHR ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDisplayModeKHR" ); + detail::throwResultException( result, "vkCreateDisplayModeKHR" ); } } @@ -7536,9 +7516,9 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - 
m_physicalDevice = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); - m_displayModeKHR = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_physicalDevice, rhs.m_physicalDevice ); + std::swap( m_displayModeKHR, rhs.m_displayModeKHR ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7609,7 +7589,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_event ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateEvent" ); + detail::throwResultException( result, "vkCreateEvent" ); } } @@ -7644,11 +7624,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_event = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_event, rhs.m_event ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7737,7 +7716,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_fence ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateFence" ); + detail::throwResultException( result, "vkCreateFence" ); } } @@ -7755,7 +7734,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_fence ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkRegisterDeviceEventEXT" ); + detail::throwResultException( result, "vkRegisterDeviceEventEXT" ); } } @@ -7775,7 +7754,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_fence ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkRegisterDisplayEventEXT" ); + detail::throwResultException( result, "vkRegisterDisplayEventEXT" ); } } @@ -7810,11 +7789,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_fence = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_fence, rhs.m_fence ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -7899,7 +7877,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_framebuffer ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateFramebuffer" ); + detail::throwResultException( result, "vkCreateFramebuffer" ); } } @@ -7934,11 +7912,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_framebuffer = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} 
); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_framebuffer, rhs.m_framebuffer ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -8023,7 +8000,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_image ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateImage" ); + detail::throwResultException( result, "vkCreateImage" ); } } @@ -8058,11 +8035,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_image = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_image, rhs.m_image ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -8167,7 +8143,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_imageView ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateImageView" ); + detail::throwResultException( result, "vkCreateImageView" ); } } @@ -8202,11 +8178,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_imageView = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_imageView, rhs.m_imageView ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -8291,7 +8266,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_indirectCommandsLayout ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateIndirectCommandsLayoutNV" ); + detail::throwResultException( result, "vkCreateIndirectCommandsLayoutNV" ); } } @@ -8326,11 +8301,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_indirectCommandsLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -8412,7 +8386,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_micromap ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateMicromapEXT" ); + detail::throwResultException( result, "vkCreateMicromapEXT" ); } } @@ -8447,11 +8421,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( 
this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_micromap = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_micromap, rhs.m_micromap ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -8532,7 +8505,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_session ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateOpticalFlowSessionNV" ); + detail::throwResultException( result, "vkCreateOpticalFlowSessionNV" ); } } @@ -8567,11 +8540,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_session = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_session, rhs.m_session ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -8655,7 +8627,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_configuration ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkAcquirePerformanceConfigurationINTEL" ); + detail::throwResultException( result, "vkAcquirePerformanceConfigurationINTEL" ); } } @@ -8684,10 +8656,9 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_configuration = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_configuration, rhs.m_configuration ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -8764,7 +8735,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_pipelineCache ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreatePipelineCache" ); + detail::throwResultException( result, "vkCreatePipelineCache" ); } } @@ -8799,11 +8770,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_pipelineCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineCache, rhs.m_pipelineCache ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -8895,7 +8865,7 @@ namespace VULKAN_HPP_NAMESPACE if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( 
m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { - throwResultException( m_constructorSuccessCode, "vkCreateComputePipelines" ); + detail::throwResultException( m_constructorSuccessCode, "vkCreateComputePipelines" ); } } @@ -8917,7 +8887,7 @@ namespace VULKAN_HPP_NAMESPACE if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { - throwResultException( m_constructorSuccessCode, "vkCreateGraphicsPipelines" ); + detail::throwResultException( m_constructorSuccessCode, "vkCreateGraphicsPipelines" ); } } @@ -8943,7 +8913,7 @@ namespace VULKAN_HPP_NAMESPACE ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) && ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { - throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesKHR" ); + detail::throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesKHR" ); } } @@ -8965,7 +8935,7 @@ namespace VULKAN_HPP_NAMESPACE if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) { - throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesNV" ); + detail::throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesNV" ); } } @@ -9003,12 +8973,11 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_pipeline = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_pipeline, rhs.m_pipeline ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -9134,7 +9103,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkCreateComputePipelines" ); + detail::throwResultException( result, "vkCreateComputePipelines" ); } } @@ -9162,7 +9131,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkCreateGraphicsPipelines" ); + detail::throwResultException( result, "vkCreateGraphicsPipelines" ); } } @@ -9193,7 +9162,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkCreateRayTracingPipelinesKHR" ); + detail::throwResultException( result, "vkCreateRayTracingPipelinesKHR" ); } } @@ -9221,7 +9190,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkCreateRayTracingPipelinesNV" ); + detail::throwResultException( result, "vkCreateRayTracingPipelinesNV" ); } } @@ -9258,7 +9227,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_pipelineLayout ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreatePipelineLayout" ); + detail::throwResultException( result, "vkCreatePipelineLayout" ); } } @@ -9293,11 +9262,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - 
m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_pipelineLayout = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineLayout, rhs.m_pipelineLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -9379,7 +9347,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_privateDataSlot ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreatePrivateDataSlot" ); + detail::throwResultException( result, "vkCreatePrivateDataSlot" ); } } @@ -9414,11 +9382,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_privateDataSlot = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_privateDataSlot, rhs.m_privateDataSlot ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -9500,7 +9467,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_queryPool ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateQueryPool" ); + detail::throwResultException( result, "vkCreateQueryPool" ); } } @@ -9535,11 +9502,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_queryPool = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_queryPool, rhs.m_queryPool ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -9672,8 +9638,8 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - m_queue = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_queue, rhs.m_queue ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -9778,7 +9744,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_renderPass ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateRenderPass" ); + detail::throwResultException( result, "vkCreateRenderPass" ); } } @@ -9796,7 +9762,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_renderPass ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateRenderPass2" ); + detail::throwResultException( result, "vkCreateRenderPass2" ); } } @@ -9831,11 +9797,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - 
clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_renderPass = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_renderPass, rhs.m_renderPass ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -9924,7 +9889,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_sampler ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateSampler" ); + detail::throwResultException( result, "vkCreateSampler" ); } } @@ -9959,11 +9924,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_sampler = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_sampler, rhs.m_sampler ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -10044,7 +10008,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_ycbcrConversion ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateSamplerYcbcrConversion" ); + detail::throwResultException( result, "vkCreateSamplerYcbcrConversion" ); } } @@ -10079,11 +10043,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_ycbcrConversion = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -10165,7 +10128,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_semaphore ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateSemaphore" ); + detail::throwResultException( result, "vkCreateSemaphore" ); } } @@ -10200,11 +10163,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_semaphore = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_semaphore, rhs.m_semaphore ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -10294,7 +10256,7 @@ namespace VULKAN_HPP_NAMESPACE 
reinterpret_cast( &m_shader ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateShadersEXT" ); + detail::throwResultException( result, "vkCreateShadersEXT" ); } } @@ -10329,11 +10291,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_shader = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_shader, rhs.m_shader ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -10419,7 +10380,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkCreateShadersEXT" ); + detail::throwResultException( result, "vkCreateShadersEXT" ); } } @@ -10456,7 +10417,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_shaderModule ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateShaderModule" ); + detail::throwResultException( result, "vkCreateShaderModule" ); } } @@ -10491,11 +10452,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_shaderModule = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_shaderModule, rhs.m_shaderModule ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -10581,7 +10541,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateAndroidSurfaceKHR" ); + detail::throwResultException( result, "vkCreateAndroidSurfaceKHR" ); } } # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ @@ -10601,7 +10561,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDirectFBSurfaceEXT" ); + detail::throwResultException( result, "vkCreateDirectFBSurfaceEXT" ); } } # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ @@ -10620,7 +10580,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateDisplayPlaneSurfaceKHR" ); + detail::throwResultException( result, "vkCreateDisplayPlaneSurfaceKHR" ); } } @@ -10638,7 +10598,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateHeadlessSurfaceEXT" ); + detail::throwResultException( result, "vkCreateHeadlessSurfaceEXT" ); } } @@ -10657,7 +10617,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateIOSSurfaceMVK" ); + detail::throwResultException( result, 
"vkCreateIOSSurfaceMVK" ); } } # endif /*VK_USE_PLATFORM_IOS_MVK*/ @@ -10677,7 +10637,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateImagePipeSurfaceFUCHSIA" ); + detail::throwResultException( result, "vkCreateImagePipeSurfaceFUCHSIA" ); } } # endif /*VK_USE_PLATFORM_FUCHSIA*/ @@ -10697,7 +10657,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateMacOSSurfaceMVK" ); + detail::throwResultException( result, "vkCreateMacOSSurfaceMVK" ); } } # endif /*VK_USE_PLATFORM_MACOS_MVK*/ @@ -10717,7 +10677,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateMetalSurfaceEXT" ); + detail::throwResultException( result, "vkCreateMetalSurfaceEXT" ); } } # endif /*VK_USE_PLATFORM_METAL_EXT*/ @@ -10737,7 +10697,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateScreenSurfaceQNX" ); + detail::throwResultException( result, "vkCreateScreenSurfaceQNX" ); } } # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ @@ -10757,7 +10717,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateStreamDescriptorSurfaceGGP" ); + detail::throwResultException( result, "vkCreateStreamDescriptorSurfaceGGP" ); } } # endif /*VK_USE_PLATFORM_GGP*/ @@ -10777,7 +10737,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateViSurfaceNN" ); + detail::throwResultException( result, "vkCreateViSurfaceNN" ); } } # endif /*VK_USE_PLATFORM_VI_NN*/ @@ -10797,7 +10757,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateWaylandSurfaceKHR" ); + detail::throwResultException( result, "vkCreateWaylandSurfaceKHR" ); } } # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ @@ -10817,7 +10777,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateWin32SurfaceKHR" ); + detail::throwResultException( result, "vkCreateWin32SurfaceKHR" ); } } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -10837,7 +10797,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateXcbSurfaceKHR" ); + detail::throwResultException( result, "vkCreateXcbSurfaceKHR" ); } } # endif /*VK_USE_PLATFORM_XCB_KHR*/ @@ -10857,7 +10817,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_surface ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateXlibSurfaceKHR" ); + detail::throwResultException( result, "vkCreateXlibSurfaceKHR" ); } } # endif /*VK_USE_PLATFORM_XLIB_KHR*/ @@ -10893,11 +10853,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_instance = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ); - m_surface = 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_instance, rhs.m_instance ); + std::swap( m_surface, rhs.m_surface ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -10978,7 +10937,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_swapchain ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateSwapchainKHR" ); + detail::throwResultException( result, "vkCreateSwapchainKHR" ); } } @@ -11013,11 +10972,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_swapchain = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_swapchain, rhs.m_swapchain ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -11138,7 +11096,7 @@ namespace VULKAN_HPP_NAMESPACE } else { - throwResultException( result, "vkCreateSharedSwapchainsKHR" ); + detail::throwResultException( result, "vkCreateSharedSwapchainsKHR" ); } } @@ -11175,7 +11133,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_validationCache ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateValidationCacheEXT" ); + detail::throwResultException( result, "vkCreateValidationCacheEXT" ); } } @@ -11210,11 +11168,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_validationCache = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_validationCache, rhs.m_validationCache ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -11302,7 +11259,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_videoSession ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateVideoSessionKHR" ); + detail::throwResultException( result, "vkCreateVideoSessionKHR" ); } } @@ -11337,11 +11294,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_videoSession = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSession, rhs.m_videoSession ); + std::swap( m_allocator, rhs.m_allocator ); 
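The hunks above change every RAII handle wrapper's move-assignment operator from a clear-then-exchange sequence to plain member swaps. A minimal sketch of the resulting pattern follows; Owner and releaseResource are placeholder names used only for illustration, not Vulkan-Hpp symbols, while the real wrappers swap their device, handle, allocator, and dispatcher members exactly as the diff shows.

#include <utility>

class Owner
{
public:
  Owner & operator=( Owner && rhs ) noexcept
  {
    if ( this != &rhs )
    {
      // Old behaviour: clear() destroyed the currently held resource, then each
      // member was exchange()d out of rhs. New behaviour: the members are simply
      // swapped, so the operator itself destroys nothing and the resource that
      // *this used to own is released later, when rhs is destroyed.
      std::swap( m_resource, rhs.m_resource );
    }
    return *this;
  }

  ~Owner()
  {
    releaseResource( m_resource );  // placeholder for the matching vkDestroy*/vkFree* call
  }

private:
  static void releaseResource( void * resource )
  {
    (void)resource;  // no-op placeholder; a real wrapper dispatches the destroy function
  }

  void * m_resource = nullptr;  // stands in for m_device / m_fence / m_allocator / m_dispatcher
};

One consequence worth noting for downstream users: after a move assignment the moved-from object may now hold the previously owned handle instead of being empty, so that handle is released in the moved-from object's destructor rather than inside operator=.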
+ std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } @@ -11429,7 +11385,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &m_videoSessionParameters ) ) ); if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) { - throwResultException( result, "vkCreateVideoSessionParametersKHR" ); + detail::throwResultException( result, "vkCreateVideoSessionParametersKHR" ); } } @@ -11464,11 +11420,10 @@ namespace VULKAN_HPP_NAMESPACE { if ( this != &rhs ) { - clear(); - m_device = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ); - m_videoSessionParameters = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ); - m_allocator = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ); - m_dispatcher = VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ); + std::swap( m_device, rhs.m_device ); + std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); } return *this; } diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp index c7d9332..359fa6d 100644 --- a/include/vulkan/vulkan_to_string.hpp +++ b/include/vulkan/vulkan_to_string.hpp @@ -1589,8 +1589,8 @@ namespace VULKAN_HPP_NAMESPACE result += "TaskShaderEXT | "; if ( value & PipelineStageFlagBits2::eMeshShaderEXT ) result += "MeshShaderEXT | "; - if ( value & PipelineStageFlagBits2::eSubpassShadingHUAWEI ) - result += "SubpassShadingHUAWEI | "; + if ( value & PipelineStageFlagBits2::eSubpassShaderHUAWEI ) + result += "SubpassShaderHUAWEI | "; if ( value & PipelineStageFlagBits2::eInvocationMaskHUAWEI ) result += "InvocationMaskHUAWEI | "; if ( value & PipelineStageFlagBits2::eAccelerationStructureCopyKHR ) @@ -6277,7 +6277,7 @@ namespace VULKAN_HPP_NAMESPACE case PipelineStageFlagBits2::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; case PipelineStageFlagBits2::eTaskShaderEXT: return "TaskShaderEXT"; case PipelineStageFlagBits2::eMeshShaderEXT: return "MeshShaderEXT"; - case PipelineStageFlagBits2::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; + case PipelineStageFlagBits2::eSubpassShaderHUAWEI: return "SubpassShaderHUAWEI"; case PipelineStageFlagBits2::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI"; case PipelineStageFlagBits2::eAccelerationStructureCopyKHR: return "AccelerationStructureCopyKHR"; case PipelineStageFlagBits2::eMicromapBuildEXT: return "MicromapBuildEXT"; diff --git a/registry/validusage.json b/registry/validusage.json index 5000009..5546e28 100644 --- a/registry/validusage.json +++ b/registry/validusage.json @@ -1,9 +1,9 @@ { "version info": { "schema version": 2, - "api version": "1.3.255", - "comment": "from git branch: github-main commit: 012db30fd16929f9fd30dfbc2a7c86e048d64015", - "date": "2023-06-23 11:41:13Z" + "api version": "1.3.256", + "comment": "from git branch: github-main commit: 3dae5d7fbf332970ae0a97d5ab05ae5db93e62f0", + "date": "2023-06-30 12:46:10Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -1638,7 +1638,7 @@ }, { "vuid": "VUID-VkSemaphoreSubmitInfo-stageMask-04957", - "text": " If the subpassShading feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": "VUID-VkSemaphoreSubmitInfo-stageMask-04995", @@ 
-3810,7 +3810,7 @@ }, { "vuid": "VUID-vkCmdResetEvent2-stageMask-04957", - "text": " If the subpassShading feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:stageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": "VUID-vkCmdResetEvent2-stageMask-04995", @@ -4530,7 +4530,7 @@ }, { "vuid": "VUID-VkMemoryBarrier2-srcStageMask-04957", - "text": " If the subpassShading feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": "VUID-VkMemoryBarrier2-srcStageMask-04995", @@ -4554,7 +4554,7 @@ }, { "vuid": "VUID-VkMemoryBarrier2-srcAccessMask-03903", - "text": " If pname:srcAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" }, { "vuid": "VUID-VkMemoryBarrier2-srcAccessMask-03904", @@ -4738,7 +4738,7 @@ }, { "vuid": "VUID-VkMemoryBarrier2-dstStageMask-04957", - "text": " If the subpassShading feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": "VUID-VkMemoryBarrier2-dstStageMask-04995", @@ -4762,7 +4762,7 @@ }, { "vuid": "VUID-VkMemoryBarrier2-dstAccessMask-03903", - "text": " If pname:dstAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" }, { "vuid": "VUID-VkMemoryBarrier2-dstAccessMask-03904", @@ -4990,7 +4990,7 @@ }, { "vuid": "VUID-VkBufferMemoryBarrier2-srcStageMask-04957", - "text": " If the subpassShading feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": "VUID-VkBufferMemoryBarrier2-srcStageMask-04995", @@ -5014,7 +5014,7 @@ }, { "vuid": "VUID-VkBufferMemoryBarrier2-srcAccessMask-03903", - "text": " If pname:srcAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:srcStageMask must 
include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" }, { "vuid": "VUID-VkBufferMemoryBarrier2-srcAccessMask-03904", @@ -5198,7 +5198,7 @@ }, { "vuid": "VUID-VkBufferMemoryBarrier2-dstStageMask-04957", - "text": " If the subpassShading feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": "VUID-VkBufferMemoryBarrier2-dstStageMask-04995", @@ -5222,7 +5222,7 @@ }, { "vuid": "VUID-VkBufferMemoryBarrier2-dstAccessMask-03903", - "text": " If pname:dstAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" }, { "vuid": "VUID-VkBufferMemoryBarrier2-dstAccessMask-03904", @@ -5526,7 +5526,7 @@ }, { "vuid": "VUID-VkImageMemoryBarrier2-srcStageMask-04957", - "text": " If the subpassShading feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:srcStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": "VUID-VkImageMemoryBarrier2-srcStageMask-04995", @@ -5550,7 +5550,7 @@ }, { "vuid": "VUID-VkImageMemoryBarrier2-srcAccessMask-03903", - "text": " If pname:srcAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" + "text": " If pname:srcAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:srcStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier2-srcAccessMask-03904", @@ -5734,7 +5734,7 @@ }, { "vuid": "VUID-VkImageMemoryBarrier2-dstStageMask-04957", - "text": " If the subpassShading feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:dstStageMask must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": "VUID-VkImageMemoryBarrier2-dstStageMask-04995", @@ -5758,7 +5758,7 @@ }, { "vuid": "VUID-VkImageMemoryBarrier2-dstAccessMask-03903", - "text": " If pname:dstAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI, VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" + "text": " If pname:dstAccessMask includes VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT, pname:dstStageMask must include VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI, 
VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT, or VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT" }, { "vuid": "VUID-VkImageMemoryBarrier2-dstAccessMask-03904", @@ -9997,8 +9997,8 @@ "text": " If the fragmentDensityMap feature is not enabled, flags must not include VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT" }, { - "vuid": "VUID-VkShaderCreateInfoEXT-flags-08413", - "text": " If stage is not VK_SHADER_STAGE_COMPUTE_BIT, flags must not include VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT or VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT" + "vuid": "VUID-VkShaderCreateInfoEXT-flags-08992", + "text": " If flags includes VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT, stage must be one of VK_SHADER_STAGE_MESH_BIT_EXT, VK_SHADER_STAGE_TASK_BIT_EXT, or VK_SHADER_STAGE_COMPUTE_BIT" }, { "vuid": "VUID-VkShaderCreateInfoEXT-flags-08485", @@ -11080,6 +11080,10 @@ "vuid": "VUID-VkPipelineShaderStageCreateInfo-flags-02785", "text": " If flags has the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT flag set, the computeFullSubgroups feature must be enabled" }, + { + "vuid": "VUID-VkPipelineShaderStageCreateInfo-flags-08988", + "text": " If flags includes VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, stage must be one of VK_SHADER_STAGE_MESH_BIT_EXT, VK_SHADER_STAGE_TASK_BIT_EXT, or VK_SHADER_STAGE_COMPUTE_BIT" + }, { "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-02754", "text": " If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfo structure is included in the pNext chain, flags must not have the VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT flag set" @@ -11105,7 +11109,7 @@ "text": " If flags has the VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT flag set and flags does not have the VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT flag set and no VkPipelineShaderStageRequiredSubgroupSizeCreateInfo structure is included in the pNext chain, the local workgroup size in the X dimension of the pipeline must be a multiple of subgroupSize" }, { - "vuid": "VUID-VkPipelineShaderStageCreateInfo-flags-", + "vuid": "VUID-VkPipelineShaderStageCreateInfo-module-08987", "text": " If module uses the OpTypeCooperativeMatrixKHR instruction with a Scope equal to Subgroup, then the local workgroup size in the X dimension of the pipeline must be a multiple of subgroupSize." 
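The vulkan_to_string.hpp and validusage.json hunks in this patch track the registry rename of the HUAWEI subpass-shading pipeline stage from SUBPASS_SHADING to SUBPASS_SHADER. A small usage sketch with the new spelling is below; it assumes the subpassShading feature of VK_HUAWEI_subpass_shading is enabled and that the old token remains available as a compatibility alias, which should be confirmed against vulkan_core.h in this release.

#include <vulkan/vulkan_core.h>

// Dependency barrier written against the renamed stage bit. The access/stage
// pairing mirrors VUID-VkMemoryBarrier2-dstAccessMask-03903 as updated above.
VkMemoryBarrier2 makeSubpassShaderBarrier()
{
  VkMemoryBarrier2 barrier{};
  barrier.sType         = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2;
  barrier.srcStageMask  = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT;
  barrier.srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT;
  barrier.dstStageMask  = VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI;  // new spelling
  barrier.dstAccessMask = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT;
  return barrier;
}

In the C++ bindings the corresponding enumerator is now PipelineStageFlagBits2::eSubpassShaderHUAWEI, as the vulkan_to_string.hpp hunk above shows.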
}, { @@ -13584,6 +13588,10 @@ "vuid": "VUID-VkPipelineCacheHeaderVersionOne-headerVersion-04968", "text": " headerVersion must be VK_PIPELINE_CACHE_HEADER_VERSION_ONE" }, + { + "vuid": "VUID-VkPipelineCacheHeaderVersionOne-headerSize-08990", + "text": " headerSize must not exceed the size of the pipeline cache" + }, { "vuid": "VUID-VkPipelineCacheHeaderVersionOne-headerVersion-parameter", "text": " headerVersion must be a valid VkPipelineCacheHeaderVersion value" @@ -14967,6 +14975,10 @@ { "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-handleType-parameter", "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" + }, + { + "vuid": "VUID-VkImportMemoryHostPointerInfoEXT-pHostPointer-parameter", + "text": " pHostPointer must be a pointer value" } ] }, @@ -14996,6 +15008,10 @@ "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-handleType-parameter", "text": " handleType must be a valid VkExternalMemoryHandleTypeFlagBits value" }, + { + "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-pHostPointer-parameter", + "text": " pHostPointer must be a pointer value" + }, { "vuid": "VUID-vkGetMemoryHostPointerPropertiesEXT-pMemoryHostPointerProperties-parameter", "text": " pMemoryHostPointerProperties must be a valid pointer to a VkMemoryHostPointerPropertiesEXT structure" @@ -17042,7 +17058,11 @@ }, { "vuid": "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-pPlaneLayouts-parameter", - "text": " If drmFormatModifierPlaneCount is not 0, pPlaneLayouts must be a valid pointer to an array of drmFormatModifierPlaneCount VkSubresourceLayout structures" + "text": " pPlaneLayouts must be a valid pointer to an array of drmFormatModifierPlaneCount VkSubresourceLayout structures" + }, + { + "vuid": "VUID-VkImageDrmFormatModifierExplicitCreateInfoEXT-drmFormatModifierPlaneCount-arraylength", + "text": " drmFormatModifierPlaneCount must be greater than 0" } ] }, @@ -25270,7 +25290,7 @@ }, { "vuid": "VUID-vkCmdResetQueryPool-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, compute, decode, encode, or opticalflow operations" + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, compute, decode, encode, or optical flow operations" }, { "vuid": "VUID-vkCmdResetQueryPool-renderpass", @@ -25986,7 +26006,7 @@ }, { "vuid": "VUID-vkCmdWriteTimestamp2-stage-04957", - "text": " If the subpassShading feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": "VUID-vkCmdWriteTimestamp2-stage-04995", @@ -26146,7 +26166,7 @@ }, { "vuid": "VUID-vkCmdWriteTimestamp-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, compute, decode, encode, or opticalflow operations" + "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, compute, decode, encode, or optical flow operations" }, { "vuid": "VUID-vkCmdWriteTimestamp-commonparent", @@ -30186,7 +30206,7 @@ }, { "vuid": "VUID-vkCmdWriteBufferMarker2AMD-stage-04957", - "text": " If the subpassShading feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI" + "text": " If the subpassShading feature is not enabled, pname:stage must not contain VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI" }, { "vuid": 
"VUID-vkCmdWriteBufferMarker2AMD-stage-04995", @@ -30390,7 +30410,7 @@ "core": [ { "vuid": "VUID-vkCmdSetPrimitiveRestartEnable-None-08970", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetPrimitiveRestartEnable-commandBuffer-parameter", @@ -30414,7 +30434,7 @@ "core": [ { "vuid": "VUID-vkCmdSetPrimitiveTopology-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetPrimitiveTopology-commandBuffer-parameter", @@ -30550,7 +30570,7 @@ }, { "vuid": "VUID-vkCmdDraw-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDraw-None-08601", @@ -31842,7 +31862,7 @@ }, { "vuid": "VUID-vkCmdDrawIndexed-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawIndexed-None-08601", @@ -33142,7 +33162,7 @@ }, { "vuid": "VUID-vkCmdDrawMultiEXT-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawMultiEXT-None-08601", @@ -34450,7 +34470,7 @@ }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-08601", @@ -35770,7 +35790,7 @@ }, { "vuid": 
"VUID-vkCmdDrawIndirect-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawIndirect-None-08601", @@ -37106,7 +37126,7 @@ }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-08601", @@ -38454,7 +38474,7 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-08601", @@ -39798,7 +39818,7 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-08601", @@ -41154,7 +41174,7 @@ }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-08600", - "text": " For each set n that is 
statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-08601", @@ -42566,7 +42586,7 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-08601", @@ -43790,7 +43810,7 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-08601", @@ -45062,7 +45082,7 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-08601", @@ -46350,7 +46370,7 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have 
been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-08601", @@ -47602,7 +47622,7 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-08601", @@ -48902,7 +48922,7 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-08601", @@ -50190,7 +50210,7 @@ }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-08601", @@ -51426,7 +51446,7 @@ }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, 
with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-08601", @@ -53654,7 +53674,7 @@ "core": [ { "vuid": "VUID-vkCmdSetViewportWithCount-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetViewportWithCount-viewportCount-03394", @@ -53698,7 +53718,7 @@ "core": [ { "vuid": "VUID-vkCmdSetScissorWithCount-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetScissorWithCount-scissorCount-03397", @@ -53954,7 +53974,7 @@ "core": [ { "vuid": "VUID-vkCmdSetRasterizerDiscardEnable-None-08970", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetRasterizerDiscardEnable-commandBuffer-parameter", @@ -54766,7 +54786,7 @@ "core": [ { "vuid": "VUID-vkCmdSetFrontFace-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetFrontFace-commandBuffer-parameter", @@ -54794,7 +54814,7 @@ "core": [ { "vuid": "VUID-vkCmdSetCullMode-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetCullMode-commandBuffer-parameter", @@ -54858,7 +54878,7 @@ "core": [ { "vuid": "VUID-vkCmdSetDepthBiasEnable-None-08970", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetDepthBiasEnable-commandBuffer-parameter", @@ -55462,7 +55482,7 @@ "core": [ { "vuid": "VUID-vkCmdSetDepthBoundsTestEnable-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetDepthBoundsTestEnable-commandBuffer-parameter", @@ -55514,7 +55534,7 @@ "core": [ { "vuid": "VUID-vkCmdSetStencilTestEnable-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetStencilTestEnable-commandBuffer-parameter", @@ -55538,7 +55558,7 @@ "core": [ { "vuid": "VUID-vkCmdSetStencilOp-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetStencilOp-commandBuffer-parameter", @@ -55690,7 +55710,7 @@ "core": [ { "vuid": "VUID-vkCmdSetDepthTestEnable-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetDepthTestEnable-commandBuffer-parameter", @@ -55714,7 +55734,7 @@ "core": [ { "vuid": "VUID-vkCmdSetDepthCompareOp-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetDepthCompareOp-commandBuffer-parameter", @@ -55742,7 +55762,7 @@ "core": [ { "vuid": "VUID-vkCmdSetDepthWriteEnable-None-08971", - "text": " At least one of the following must be true:" + "text": " At least one of the following must be true:
\n\n
" }, { "vuid": "VUID-vkCmdSetDepthWriteEnable-commandBuffer-parameter", @@ -56602,7 +56622,7 @@ }, { "vuid": "VUID-vkCmdDispatch-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDispatch-None-08601", @@ -56870,7 +56890,7 @@ }, { "vuid": "VUID-vkCmdDispatchIndirect-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDispatchIndirect-None-08601", @@ -57158,7 +57178,7 @@ }, { "vuid": "VUID-vkCmdDispatchBase-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdDispatchBase-None-08601", @@ -57442,7 +57462,7 @@ }, { "vuid": "VUID-vkCmdSubpassShadingHUAWEI-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdSubpassShadingHUAWEI-None-08601", @@ -58018,7 +58038,7 @@ }, { "vuid": 
"VUID-vkCmdExecuteGeneratedCommandsNV-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-08601", @@ -65958,7 +65978,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysNV-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdTraceRaysNV-None-08601", @@ -66318,7 +66338,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysKHR-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdTraceRaysKHR-None-08601", @@ -66701,8 +66721,8 @@ "text": " Each element in the invocation mask image must have the value 0 or 1. 
The value 1 means the invocation is active" }, { - "vuid": "VUID-vkCmdBindInvocationMaskHUAWEI-width-04983", - "text": " width in vkCmdTraceRaysKHR should be 1" + "vuid": "VUID-vkCmdBindInvocationMaskHUAWEI-depth-04983", + "text": " depth in vkCmdTraceRaysKHR must be 1" }, { "vuid": "VUID-vkCmdBindInvocationMaskHUAWEI-commandBuffer-parameter", @@ -66794,7 +66814,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-08601", @@ -67218,7 +67238,7 @@ }, { "vuid": "VUID-vkCmdTraceRaysIndirect2KHR-None-08600", - "text": " For each set n that is statically used a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" }, { "vuid": "VUID-vkCmdTraceRaysIndirect2KHR-None-08601", @@ -70134,7 +70154,7 @@ }, { "vuid": "VUID-vkCmdOpticalFlowExecuteNV-commandBuffer-cmdpool", - "text": " The VkCommandPool that commandBuffer was allocated from must support opticalflow operations" + "text": " The VkCommandPool that commandBuffer was allocated from must support optical flow operations" }, { "vuid": "VUID-vkCmdOpticalFlowExecuteNV-renderpass", @@ -74249,51 +74269,51 @@ "text": " OpTypeCooperativeMatrixNV and OpCooperativeMatrix* instructions must not be used in shader stages not included in VkPhysicalDeviceCooperativeMatrixPropertiesNV::cooperativeMatrixSupportedStages" }, { - "vuid": "VUID-RuntimeSpirv-OpTypeCooperativeMatrixKHR-", + "vuid": "VUID-RuntimeSpirv-OpTypeCooperativeMatrixKHR-08974", "text": " For OpTypeCooperativeMatrixKHR, the component type, scope, number of rows, and number of columns must match one of the matrices in any of the supported VkCooperativeMatrixPropertiesKHR." }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-MSize-08975", "text": " For OpCooperativeMatrixMulAddKHR, the type of A must have VkCooperativeMatrixPropertiesKHR::MSize rows and VkCooperativeMatrixPropertiesKHR::KSize columns and have a component type that matches VkCooperativeMatrixPropertiesKHR::AType." 
}, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-08976", "text": " For OpCooperativeMatrixMulAddKHR, when the component type of A is a signed integer type, the MatrixASignedComponents cooperative matrix operand must be present." }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-KSize-08977", "text": " For OpCooperativeMatrixMulAddKHR, the type of B must have VkCooperativeMatrixPropertiesKHR::KSize rows and VkCooperativeMatrixPropertiesKHR::NSize columns and have a component type that matches VkCooperativeMatrixPropertiesKHR::BType." }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-08978", "text": " For OpCooperativeMatrixMulAddKHR, when the component type of B is a signed integer type, the MatrixBSignedComponents cooperative matrix operand must be present." }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-MSize-08979", "text": " For OpCooperativeMatrixMulAddKHR, the type of C must have VkCooperativeMatrixPropertiesKHR::MSize rows and VkCooperativeMatrixPropertiesKHR::NSize columns and have a component type that matches VkCooperativeMatrixPropertiesKHR::CType." }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-08980", "text": " For OpCooperativeMatrixMulAddKHR, when the component type of C is a signed integer type, the MatrixCSignedComponents cooperative matrix operand must be present." }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-MSize-08981", "text": " For OpCooperativeMatrixMulAddKHR, the type of Result must have VkCooperativeMatrixPropertiesKHR::MSize rows and VkCooperativeMatrixPropertiesKHR::NSize columns and have a component type that matches VkCooperativeMatrixPropertiesKHR::ResultType." }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-08982", "text": " For OpCooperativeMatrixMulAddKHR, when the component type of Result is a signed integer type, the MatrixResultSignedComponents cooperative matrix operand must be present." }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-saturatingAccumulation-08983", "text": " For OpCooperativeMatrixMulAddKHR, the SaturatingAccumulation cooperative matrix operand must be present if and only if VkCooperativeMatrixPropertiesKHR::saturatingAccumulation is VK_TRUE." }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixMulAddKHR-", + "vuid": "VUID-RuntimeSpirv-scope-08984", "text": " For OpCooperativeMatrixMulAddKHR, the type of A, B, C, and Result must all have a scope of scope." }, { - "vuid": "VUID-RuntimeSpirv-OpTypeCooperativeMatrixKHR-", + "vuid": "VUID-RuntimeSpirv-cooperativeMatrixSupportedStages-08985", "text": " OpTypeCooperativeMatrixKHR and OpCooperativeMatrix* instructions must not be used in shader stages not included in VkPhysicalDeviceCooperativeMatrixPropertiesKHR::cooperativeMatrixSupportedStages." 
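The cooperative-matrix VUIDs that receive numbers above (08974-08985) all reduce to one rule: the M x N x K shape, component types, and scope used by an OpCooperativeMatrixMulAddKHR must exactly match one of the combinations the implementation advertises through vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR. A minimal sketch of that check follows; the 16x16x16 fp16/fp32 subgroup-scope shape and the helper name are illustrative only, and the sketch assumes VK_KHR_cooperative_matrix is supported and its entry point is reachable (in practice it is usually loaded through vkGetInstanceProcAddr).

```
#include <stdlib.h>
#include <vulkan/vulkan.h>

/* Sketch: return VK_TRUE if the implementation advertises the (illustrative)
 * 16x16x16 fp16*fp16+fp32, subgroup-scope shape that a cooperative-matrix
 * shader variant would rely on. */
static VkBool32 supports_example_coopmat_shape(VkPhysicalDevice physical_device)
{
    uint32_t count = 0;
    vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(physical_device, &count, NULL);

    VkCooperativeMatrixPropertiesKHR *props = calloc(count, sizeof(*props));
    if (!props)
        return VK_FALSE;
    for (uint32_t i = 0; i < count; ++i)
        props[i].sType = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR;
    vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(physical_device, &count, props);

    VkBool32 supported = VK_FALSE;
    for (uint32_t i = 0; i < count; ++i) {
        if (props[i].MSize == 16 && props[i].NSize == 16 && props[i].KSize == 16 &&
            props[i].AType == VK_COMPONENT_TYPE_FLOAT16_KHR &&
            props[i].BType == VK_COMPONENT_TYPE_FLOAT16_KHR &&
            props[i].CType == VK_COMPONENT_TYPE_FLOAT32_KHR &&
            props[i].ResultType == VK_COMPONENT_TYPE_FLOAT32_KHR &&
            props[i].scope == VK_SCOPE_SUBGROUP_KHR) {
            supported = VK_TRUE; /* the shape used by the shader is legal */
            break;
        }
    }
    free(props);
    return supported;
}
```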
}, { @@ -74413,7 +74433,7 @@ "text": " The sum of size in bytes for variables and padding in the TaskPayloadWorkgroupEXT or Workgroup {StorageClass} in the TaskEXT {ExecutionModel} must be less than or equal to maxTaskPayloadAndSharedMemorySize" }, { - "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixLoadKHR-", + "vuid": "VUID-RuntimeSpirv-OpCooperativeMatrixLoadKHR-08986", "text": " For OpCooperativeMatrixLoadKHR and OpCooperativeMatrixStoreKHR instructions, the Pointer and Stride operands must be aligned to at least the lesser of 16 bytes or the natural alignment of a row or column (depending on ColumnMajor) of the matrix (where the natural alignment is the number of columns/rows multiplied by the component size)." }, { diff --git a/registry/vk.xml b/registry/vk.xml index 1ebb5bb..de37660 100644 --- a/registry/vk.xml +++ b/registry/vk.xml @@ -175,7 +175,7 @@ branch of the member gitlab server. #define VKSC_API_VERSION_1_0 VK_MAKE_API_VERSION(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 255 +#define VK_HEADER_VERSION 256 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) // Version of this file @@ -3818,7 +3818,7 @@ typedef void* MTLSharedEvent_id; VkStructureType sType const void* pNext VkExternalMemoryHandleTypeFlagBits handleType - void* pHostPointer + void* pHostPointer VkStructureType sType @@ -4697,7 +4697,7 @@ typedef void* MTLSharedEvent_id; VkStructureType sType const void* pNext uint64_t drmFormatModifier - uint32_t drmFormatModifierPlaneCount + uint32_t drmFormatModifierPlaneCount const VkSubresourceLayout* pPlaneLayouts @@ -6368,8 +6368,8 @@ typedef void* MTLSharedEvent_id; VkStructureType sType const void* pNext - VkDeviceSize commandPoolReservedSize - uint32_t commandPoolMaxCommandBuffers + VkDeviceSize commandPoolReservedSize + uint32_t commandPoolMaxCommandBuffers VkStructureType sType @@ -6700,7 +6700,7 @@ typedef void* MTLSharedEvent_id; VkStructureType sType const void* pNext - VkVideoCodingControlFlagsKHR flags + VkVideoCodingControlFlagsKHR flags VkStructureType sType @@ -12637,7 +12637,7 @@ typedef void* MTLSharedEvent_id; VkResult vkGetMemoryHostPointerPropertiesEXT VkDevice device VkExternalMemoryHandleTypeFlagBits handleType - const void* pHostPointer + const void* pHostPointer VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties @@ -20874,13 +20874,14 @@ typedef void* MTLSharedEvent_id; - + - + + @@ -23196,15 +23197,15 @@ typedef void* MTLSharedEvent_id; - + - + - + @@ -23324,15 +23325,15 @@ typedef void* MTLSharedEvent_id; - + - - + + @@ -24742,7 +24743,7 @@ typedef void* MTLSharedEvent_id; - + @@ -24801,7 +24802,7 @@ typedef void* MTLSharedEvent_id; - + @@ -24831,17 +24832,17 @@ typedef void* MTLSharedEvent_id; - + - + - + - + @@ -24885,13 +24886,13 @@ typedef void* MTLSharedEvent_id; - + - + - + @@ -24927,7 +24928,7 @@ typedef void* MTLSharedEvent_id; - + @@ -24939,13 +24940,13 @@ typedef void* MTLSharedEvent_id; - + - + @@ -25000,7 +25001,7 @@ typedef void* MTLSharedEvent_id; VK_PIPELINE_STAGE_2_HOST_BIT - VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI + VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV From 60b89abf43a687e454f7ff6f6bfe642118e358e8 Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Fri, 30 Jun 2023 06:33:05 -0700 Subject: [PATCH 03/17] Add new .cppm file to CONTRIBUTING.md --- CONTRIBUTING.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CONTRIBUTING.md 
b/CONTRIBUTING.md index bed4271..b9a129e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -33,3 +33,4 @@ split into multiple files. All of those files are now included in this repository. * include/vulkan/*.hpp +* include/vulkan/*.cppm From ad5f8ee9750e99c5397d44c075ae5d8a38271de4 Mon Sep 17 00:00:00 2001 From: Juan Ramos Date: Thu, 29 Jun 2023 11:10:07 -0600 Subject: [PATCH 04/17] cmake: Add VULKAN_HEADERS_INSTALL option Allows add_subdirectory users to install vulkan-headers --- .github/workflows/linux.yml | 19 ++++++++++++++++--- CMakeLists.txt | 3 +++ tests/add_subdirectory/CMakeLists.txt | 26 ++++++++++++++++++++++++++ 3 files changed, 45 insertions(+), 3 deletions(-) create mode 100644 tests/add_subdirectory/CMakeLists.txt diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index 0160c3b..a15e439 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -19,6 +19,9 @@ on: pull_request: branches: - main + +env: + CMAKE_GENERATOR: Ninja permissions: contents: read @@ -32,7 +35,7 @@ jobs: with: cmakeVersion: 3.17.0 - name: Configure Vulkan-Headers - run: cmake -S . -B build -G "Ninja" + run: cmake -S . -B build - name: Install Vulkan-Headers run: cmake --install build --prefix ${{ github.workspace }}/build/install - name: Test Vulkan-Headers find_package support @@ -46,7 +49,7 @@ jobs: with: cmakeVersion: 3.25.0 - name: Configure Vulkan-Headers - run: cmake -S . -B build -D BUILD_TESTS=ON --log-level=DEBUG -G "Ninja" + run: cmake -S . -B build -D BUILD_TESTS=ON --log-level=DEBUG - name: Build Vulkan-Headers Tests run: cmake --build build @@ -58,6 +61,16 @@ jobs: with: cmakeVersion: 3.15.0 - name: Configure Vulkan-Headers - run: cmake -S . -B build/ -G "Ninja" --loglevel=DEBUG + run: cmake -S . -B build/ --loglevel=DEBUG - name: Install Vulkan-Headers run: cmake --install build/ --prefix build/install + + ubuntu-cmake-add_subdirectory: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: lukka/get-cmake@latest + - name: Test add_subdirectory support + run: cmake -S tests/add_subdirectory -B tests/add_subdirectory/build + - name: Build + run: cmake --build tests/add_subdirectory/build diff --git a/CMakeLists.txt b/CMakeLists.txt index 860acd8..3144c58 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -57,7 +57,10 @@ if (PROJECT_IS_TOP_LEVEL) if (BUILD_TESTS) add_subdirectory(tests) endif() +endif() +option(VULKAN_HEADERS_INSTALL "Install Vulkan Headers" ${PROJECT_IS_TOP_LEVEL}) +if (VULKAN_HEADERS_INSTALL) include(GNUInstallDirs) include(CMakePackageConfigHelpers) diff --git a/tests/add_subdirectory/CMakeLists.txt b/tests/add_subdirectory/CMakeLists.txt new file mode 100644 index 0000000..74f2066 --- /dev/null +++ b/tests/add_subdirectory/CMakeLists.txt @@ -0,0 +1,26 @@ +cmake_minimum_required(VERSION 3.14.2) + +project(TEST_VULKAN_HEADERS_ADD_SUBDIRECTORY_SUPPORT LANGUAGES C) + +add_subdirectory(../../ ${CMAKE_CURRENT_BINARY_DIR}/headers) + +if (NOT TARGET Vulkan::Headers) + message(FATAL_ERROR "Vulkan::Headers target not defined") +endif() + +# By default installation for a subproject should be disabled. +# This makes it easier to consume for most projects. +# Consuming the vulkan-headers via add_subdirectory and installing +# them is the more niche use case. 
+if (VULKAN_HEADERS_INSTALL) + message(FATAL_ERROR "VULKAN_HEADERS_INSTALL should be OFF!") +endif() + +add_library(foobar STATIC) + +target_link_libraries(foobar PRIVATE Vulkan::Headers) + +target_sources(foobar PRIVATE + ../vk_icd.c + ../vk_layer.c +) From 6f34ca5a370c3664c02abdfc9b11baf7b0c369bd Mon Sep 17 00:00:00 2001 From: Juan Ramos Date: Wed, 5 Jul 2023 09:59:18 -0600 Subject: [PATCH 05/17] cmake: Remove usage of configure_package_config_file Fixes issue shown here: https://github.com/KhronosGroup/Vulkan-Headers/pull/415 --- BUILD.md | 2 -- CMakeLists.txt | 25 ++++++------------------- cmake/VulkanHeadersConfig.cmake.in | 5 ----- tests/find_package/CMakeLists.txt | 9 --------- 4 files changed, 6 insertions(+), 35 deletions(-) delete mode 100644 cmake/VulkanHeadersConfig.cmake.in diff --git a/BUILD.md b/BUILD.md index 3dad776..70a635e 100644 --- a/BUILD.md +++ b/BUILD.md @@ -36,7 +36,5 @@ find_package(VulkanHeaders REQUIRED CONFIG) target_link_libraries(foobar PRIVATE Vulkan::Headers) -message(STATUS "Vulkan Headers Registry: ${VULKAN_HEADERS_REGISTRY_DIRECTORY}") - message(STATUS "Vulkan Headers Version: ${VulkanHeaders_VERSION}") ``` diff --git a/CMakeLists.txt b/CMakeLists.txt index 3144c58..d61e75f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -71,27 +71,14 @@ if (VULKAN_HEADERS_INSTALL) # Preserve source permissions https://github.com/KhronosGroup/Vulkan-Headers/issues/336 install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/registry" DESTINATION ${VLK_REGISTRY_DIR} USE_SOURCE_PERMISSIONS) - set(cmake_files_install_dir ${CMAKE_INSTALL_DATADIR}/cmake/VulkanHeaders/) - set_target_properties(Vulkan-Headers PROPERTIES EXPORT_NAME "Headers") - install(TARGETS Vulkan-Headers EXPORT VulkanHeadersTargets INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + set(cmake_install_dir "${CMAKE_INSTALL_DATADIR}/cmake/VulkanHeaders") - install(EXPORT VulkanHeadersTargets FILE VulkanHeadersTargets.cmake NAMESPACE "Vulkan::" DESTINATION ${cmake_files_install_dir}) + install(TARGETS Vulkan-Headers EXPORT VulkanHeadersConfig INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + install(EXPORT VulkanHeadersConfig NAMESPACE "Vulkan::" DESTINATION "${cmake_install_dir}") - set(vulkan_headers_config "${CMAKE_CURRENT_BINARY_DIR}/VulkanHeadersConfig.cmake") - set(VULKAN_HEADERS_REGISTRY_DIRECTORY "${VLK_REGISTRY_DIR}/registry") - - configure_package_config_file(${CMAKE_CURRENT_SOURCE_DIR}/cmake/VulkanHeadersConfig.cmake.in ${vulkan_headers_config} - INSTALL_DESTINATION ${cmake_files_install_dir} - PATH_VARS VULKAN_HEADERS_REGISTRY_DIRECTORY - NO_SET_AND_CHECK_MACRO - NO_CHECK_REQUIRED_COMPONENTS_MACRO - ) - - set(config_version "${CMAKE_CURRENT_BINARY_DIR}/VulkanHeadersConfigVersion.cmake") - - write_basic_package_version_file(${config_version} COMPATIBILITY SameMajorVersion ARCH_INDEPENDENT) - - install(FILES ${config_version} ${vulkan_headers_config} DESTINATION ${cmake_files_install_dir}) + set(version_config "${CMAKE_CURRENT_BINARY_DIR}/generated/VulkanHeadersConfigVersion.cmake") + write_basic_package_version_file("${version_config}" COMPATIBILITY SameMajorVersion ARCH_INDEPENDENT) + install(FILES "${version_config}" DESTINATION "${cmake_install_dir}") endif() diff --git a/cmake/VulkanHeadersConfig.cmake.in b/cmake/VulkanHeadersConfig.cmake.in deleted file mode 100644 index 2f74a53..0000000 --- a/cmake/VulkanHeadersConfig.cmake.in +++ /dev/null @@ -1,5 +0,0 @@ -@PACKAGE_INIT@ - -include("${CMAKE_CURRENT_LIST_DIR}/VulkanHeadersTargets.cmake") - -set(VULKAN_HEADERS_REGISTRY_DIRECTORY 
"@PACKAGE_VULKAN_HEADERS_REGISTRY_DIRECTORY@") diff --git a/tests/find_package/CMakeLists.txt b/tests/find_package/CMakeLists.txt index d9f32a4..f45fc6c 100644 --- a/tests/find_package/CMakeLists.txt +++ b/tests/find_package/CMakeLists.txt @@ -8,15 +8,6 @@ if (NOT TARGET Vulkan::Headers) message(FATAL_ERROR "Vulkan::Headers target not defined") endif() -if (NOT DEFINED VULKAN_HEADERS_REGISTRY_DIRECTORY) - message(FATAL_ERROR "VULKAN_HEADERS_REGISTRY_DIRECTORY not defined!") -endif() - -if (NOT EXISTS "${VULKAN_HEADERS_REGISTRY_DIRECTORY}/vk.xml") - message(FATAL_ERROR "VULKAN_HEADERS_REGISTRY_DIRECTORY not valid!") -endif() -message(STATUS "VULKAN_HEADERS_REGISTRY_DIRECTORY = ${VULKAN_HEADERS_REGISTRY_DIRECTORY}") - if (NOT DEFINED VulkanHeaders_VERSION) message(FATAL_ERROR "VulkanHeaders_VERSION not defined!") endif() From 482af6d865a925911eefa84f8ea9d9170f5168ad Mon Sep 17 00:00:00 2001 From: Juan Ramos Date: Wed, 5 Jul 2023 15:16:20 -0600 Subject: [PATCH 06/17] cmake: Remove VULKAN_HEADERS_INSTALL VULKAN_HEADERS_INSTALL was added since it was believed there was a valid use case for it. After looking into the use case that provoked this change there is no reason to keep VULKAN_HEADERS_INSTALL as an option. Here is the use case we do NOT want to support: https://github.com/KhronosGroup/Vulkan-Headers/pull/416#issuecomment-1622318949 Fundamentally this problem is caused add_subdirectory/find_package not being able to work together flawlessly. Which isn't the responsibility of Vulkan-Headers to fix. It's the responsibility of projects that consume Vulkan-Headers to account for either method. As described by the CMake maintainers: https://discourse.cmake.org/t/idiomatic-way-to-handle-packages-and-add-subdirectory/8400 --- CMakeLists.txt | 7 +------ tests/add_subdirectory/CMakeLists.txt | 9 +++------ 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index d61e75f..28b8caf 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -57,19 +57,14 @@ if (PROJECT_IS_TOP_LEVEL) if (BUILD_TESTS) add_subdirectory(tests) endif() -endif() -option(VULKAN_HEADERS_INSTALL "Install Vulkan Headers" ${PROJECT_IS_TOP_LEVEL}) -if (VULKAN_HEADERS_INSTALL) include(GNUInstallDirs) include(CMakePackageConfigHelpers) - set(VLK_REGISTRY_DIR "${CMAKE_INSTALL_DATADIR}/vulkan") - install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/vk_video" DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/include/vulkan" DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) # Preserve source permissions https://github.com/KhronosGroup/Vulkan-Headers/issues/336 - install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/registry" DESTINATION ${VLK_REGISTRY_DIR} USE_SOURCE_PERMISSIONS) + install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/registry" DESTINATION "${CMAKE_INSTALL_DATADIR}/vulkan" USE_SOURCE_PERMISSIONS) set_target_properties(Vulkan-Headers PROPERTIES EXPORT_NAME "Headers") diff --git a/tests/add_subdirectory/CMakeLists.txt b/tests/add_subdirectory/CMakeLists.txt index 74f2066..cb8fdcc 100644 --- a/tests/add_subdirectory/CMakeLists.txt +++ b/tests/add_subdirectory/CMakeLists.txt @@ -8,12 +8,9 @@ if (NOT TARGET Vulkan::Headers) message(FATAL_ERROR "Vulkan::Headers target not defined") endif() -# By default installation for a subproject should be disabled. -# This makes it easier to consume for most projects. -# Consuming the vulkan-headers via add_subdirectory and installing -# them is the more niche use case. 
-if (VULKAN_HEADERS_INSTALL) - message(FATAL_ERROR "VULKAN_HEADERS_INSTALL should be OFF!") +# Consuming vulkan-headers via add_subdirectory should NOT add installation code to the parent CMake project. +if (DEFINED CMAKE_INSTALL_INCLUDEDIR) + message(FATAL_ERROR "CMAKE_INSTALL_INCLUDEDIR was defined!") endif() add_library(foobar STATIC) From 179b26a792b10d9315e44f27aff196cdc3d4018f Mon Sep 17 00:00:00 2001 From: Juan Ramos Date: Wed, 5 Jul 2023 15:25:36 -0600 Subject: [PATCH 07/17] docs: Remove outdated documentation referencing `cmake/` --- .github/ISSUE_TEMPLATE/bug_report.md | 1 - .github/pull_request_template.md | 1 - CONTRIBUTING.md | 1 - 3 files changed, 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 486d70d..7d78ab8 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -15,7 +15,6 @@ This repository is responsible for the following files * BUILD.gn * BUILD.md -* cmake/ * CMakeLists.txt * tests/* * CODE_OF_CONDUCT.md diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 4a46e84..c7def69 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -6,7 +6,6 @@ This repository is responsible for the following files * BUILD.gn * BUILD.md -* cmake/ * CMakeLists.txt * tests/* * CODE_OF_CONDUCT.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b9a129e..6de905c 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,7 +8,6 @@ The majority for the Vulkan headers come from [Vulkan-Docs](https://github.com/K * BUILD.gn * BUILD.md -* cmake/ * CMakeLists.txt * tests/* * CODE_OF_CONDUCT.md From 485c0395ad85bcefe7aed17d23362d93f61f942d Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Fri, 7 Jul 2023 07:19:56 -0700 Subject: [PATCH 08/17] Update for Vulkan-Docs 1.3.257 --- include/vk_video/vulkan_video_codec_h264std.h | 1 + .../vulkan_video_codec_h264std_decode.h | 1 + .../vulkan_video_codec_h264std_encode.h | 1 + include/vk_video/vulkan_video_codec_h265std.h | 1 + .../vulkan_video_codec_h265std_decode.h | 1 + .../vulkan_video_codec_h265std_encode.h | 1 + include/vk_video/vulkan_video_codecs_common.h | 1 + include/vulkan/vulkan.cppm | 6 +- include/vulkan/vulkan.hpp | 13 +- include/vulkan/vulkan_android.h | 2 + include/vulkan/vulkan_beta.h | 5 + include/vulkan/vulkan_core.h | 341 ++++++++++- include/vulkan/vulkan_directfb.h | 1 + include/vulkan/vulkan_enums.hpp | 27 +- include/vulkan/vulkan_fuchsia.h | 4 + include/vulkan/vulkan_ggp.h | 2 + include/vulkan/vulkan_ios.h | 1 + include/vulkan/vulkan_macos.h | 1 + include/vulkan/vulkan_metal.h | 2 + include/vulkan/vulkan_screen.h | 2 + include/vulkan/vulkan_structs.hpp | 20 +- include/vulkan/vulkan_to_string.hpp | 33 - include/vulkan/vulkan_vi.h | 1 + include/vulkan/vulkan_wayland.h | 1 + include/vulkan/vulkan_win32.h | 9 + include/vulkan/vulkan_xcb.h | 1 + include/vulkan/vulkan_xlib.h | 1 + include/vulkan/vulkan_xlib_xrandr.h | 1 + registry/cgenerator.py | 2 + registry/validusage.json | 562 +++++++++++++----- registry/vk.xml | 43 +- 31 files changed, 812 insertions(+), 276 deletions(-) diff --git a/include/vk_video/vulkan_video_codec_h264std.h b/include/vk_video/vulkan_video_codec_h264std.h index 21c7b66..959961c 100644 --- a/include/vk_video/vulkan_video_codec_h264std.h +++ b/include/vk_video/vulkan_video_codec_h264std.h @@ -19,6 +19,7 @@ extern "C" { +// vulkan_video_codec_h264std is a preprocessor guard. Do not pass it to API calls. 
#define vulkan_video_codec_h264std 1 #include #define STD_VIDEO_H264_CPB_CNT_LIST_SIZE 32 diff --git a/include/vk_video/vulkan_video_codec_h264std_decode.h b/include/vk_video/vulkan_video_codec_h264std_decode.h index f7eb8ed..84ed632 100644 --- a/include/vk_video/vulkan_video_codec_h264std_decode.h +++ b/include/vk_video/vulkan_video_codec_h264std_decode.h @@ -19,6 +19,7 @@ extern "C" { +// vulkan_video_codec_h264std_decode is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codec_h264std_decode 1 #define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) diff --git a/include/vk_video/vulkan_video_codec_h264std_encode.h b/include/vk_video/vulkan_video_codec_h264std_encode.h index 47c30f7..5c54a67 100644 --- a/include/vk_video/vulkan_video_codec_h264std_encode.h +++ b/include/vk_video/vulkan_video_codec_h264std_encode.h @@ -19,6 +19,7 @@ extern "C" { +// vulkan_video_codec_h264std_encode is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codec_h264std_encode 1 // Vulkan 0.9 provisional Vulkan video H.264 encode std specification version number #define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_10 VK_MAKE_VIDEO_STD_VERSION(0, 9, 10) diff --git a/include/vk_video/vulkan_video_codec_h265std.h b/include/vk_video/vulkan_video_codec_h265std.h index 4233bdc..1221ba5 100644 --- a/include/vk_video/vulkan_video_codec_h265std.h +++ b/include/vk_video/vulkan_video_codec_h265std.h @@ -19,6 +19,7 @@ extern "C" { +// vulkan_video_codec_h265std is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codec_h265std 1 #define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7 #define STD_VIDEO_H265_CPB_CNT_LIST_SIZE 32 diff --git a/include/vk_video/vulkan_video_codec_h265std_decode.h b/include/vk_video/vulkan_video_codec_h265std_decode.h index 7eee9b3..a46a56b 100644 --- a/include/vk_video/vulkan_video_codec_h265std_decode.h +++ b/include/vk_video/vulkan_video_codec_h265std_decode.h @@ -19,6 +19,7 @@ extern "C" { +// vulkan_video_codec_h265std_decode is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codec_h265std_decode 1 #define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) diff --git a/include/vk_video/vulkan_video_codec_h265std_encode.h b/include/vk_video/vulkan_video_codec_h265std_encode.h index d736390..2b4f612 100644 --- a/include/vk_video/vulkan_video_codec_h265std_encode.h +++ b/include/vk_video/vulkan_video_codec_h265std_encode.h @@ -19,6 +19,7 @@ extern "C" { +// vulkan_video_codec_h265std_encode is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codec_h265std_encode 1 // Vulkan 0.9 provisional Vulkan video H.265 encode std specification version number #define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_11 VK_MAKE_VIDEO_STD_VERSION(0, 9, 11) diff --git a/include/vk_video/vulkan_video_codecs_common.h b/include/vk_video/vulkan_video_codecs_common.h index f486d3c..d0e86d1 100644 --- a/include/vk_video/vulkan_video_codecs_common.h +++ b/include/vk_video/vulkan_video_codecs_common.h @@ -19,6 +19,7 @@ extern "C" { +// vulkan_video_codecs_common is a preprocessor guard. Do not pass it to API calls. 
#define vulkan_video_codecs_common 1 #define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \ ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) diff --git a/include/vulkan/vulkan.cppm b/include/vulkan/vulkan.cppm index 43b9c4e..3997211 100644 --- a/include/vulkan/vulkan.cppm +++ b/include/vulkan/vulkan.cppm @@ -617,10 +617,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT; using VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT; - //=== VK_NV_cooperative_matrix === - using VULKAN_HPP_NAMESPACE::ComponentTypeNV; - using VULKAN_HPP_NAMESPACE::ScopeNV; - //=== VK_NV_coverage_reduction_mode === using VULKAN_HPP_NAMESPACE::CoverageReductionModeNV; using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagBitsNV; @@ -810,7 +806,9 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_cooperative_matrix === using VULKAN_HPP_NAMESPACE::ComponentTypeKHR; + using VULKAN_HPP_NAMESPACE::ComponentTypeNV; using VULKAN_HPP_NAMESPACE::ScopeKHR; + using VULKAN_HPP_NAMESPACE::ScopeNV; //========================= //=== Index Type Traits === diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp index fa021e2..603be0f 100644 --- a/include/vulkan/vulkan.hpp +++ b/include/vulkan/vulkan.hpp @@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 256, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 257, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION @@ -140,6 +140,17 @@ static_assert( VK_HEADER_VERSION == 256, "Wrong VK_HEADER_VERSION!" ); # undef MemoryBarrier #endif +// XLib.h defines True/False, which collides with our vk::True/vk::False +// -> undef them and provide some namepace-secure constexpr +#if defined( True ) +# undef True +constexpr int True = 1; +#endif +#if defined( False ) +# undef False +constexpr int False = 0; +#endif + #if defined( __GNUC__ ) # define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ ) #endif diff --git a/include/vulkan/vulkan_android.h b/include/vulkan/vulkan_android.h index 2e1dfa9..d86f43f 100644 --- a/include/vulkan/vulkan_android.h +++ b/include/vulkan/vulkan_android.h @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_android_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_android_surface 1 struct ANativeWindow; #define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6 @@ -42,6 +43,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR( #endif +// VK_ANDROID_external_memory_android_hardware_buffer is a preprocessor guard. Do not pass it to API calls. #define VK_ANDROID_external_memory_android_hardware_buffer 1 struct AHardwareBuffer; #define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 5 diff --git a/include/vulkan/vulkan_beta.h b/include/vulkan/vulkan_beta.h index f67a6a6..d2bcf4d 100644 --- a/include/vulkan/vulkan_beta.h +++ b/include/vulkan/vulkan_beta.h @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_portability_subset is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_portability_subset 1 #define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1 #define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset" @@ -50,6 +51,7 @@ typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR { +// VK_KHR_video_encode_queue is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_video_encode_queue 1 #define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 9 #define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue" @@ -221,6 +223,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR( #endif +// VK_EXT_video_encode_h264 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_video_encode_h264 1 #include "vk_video/vulkan_video_codec_h264std.h" #include "vk_video/vulkan_video_codec_h264std_encode.h" @@ -417,6 +420,7 @@ typedef struct VkVideoEncodeH264GopRemainingFrameInfoEXT { +// VK_EXT_video_encode_h265 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_video_encode_h265 1 #include "vk_video/vulkan_video_codec_h265std.h" #include "vk_video/vulkan_video_codec_h265std_encode.h" @@ -639,6 +643,7 @@ typedef struct VkVideoEncodeH265GopRemainingFrameInfoEXT { +// VK_NV_displacement_micromap is a preprocessor guard. Do not pass it to API calls. #define VK_NV_displacement_micromap 1 #define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 1 #define VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME "VK_NV_displacement_micromap" diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h index b56786b..cf85442 100644 --- a/include/vulkan/vulkan_core.h +++ b/include/vulkan/vulkan_core.h @@ -19,6 +19,7 @@ extern "C" { +// VK_VERSION_1_0 is a preprocessor guard. Do not pass it to API calls. #define VK_VERSION_1_0 1 #include "vk_platform.h" @@ -68,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 256 +#define VK_HEADER_VERSION 257 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -4836,6 +4837,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( #endif +// VK_VERSION_1_1 is a preprocessor guard. Do not pass it to API calls. #define VK_VERSION_1_1 1 // Vulkan 1.1 version number #define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0 @@ -5702,6 +5704,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( #endif +// VK_VERSION_1_2 is a preprocessor guard. Do not pass it to API calls. #define VK_VERSION_1_2 1 // Vulkan 1.2 version number #define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0 @@ -6456,6 +6459,7 @@ VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress( #endif +// VK_VERSION_1_3 is a preprocessor guard. Do not pass it to API calls. #define VK_VERSION_1_3 1 // Vulkan 1.3 version number #define VK_API_VERSION_1_3 VK_MAKE_API_VERSION(0, 1, 3, 0)// Patch version should always be set to 0 @@ -7498,6 +7502,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements( #endif +// VK_KHR_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_surface 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) #define VK_KHR_SURFACE_SPEC_VERSION 25 @@ -7612,6 +7617,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( #endif +// VK_KHR_swapchain is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_swapchain 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) #define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 @@ -7772,6 +7778,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( #endif +// VK_KHR_display is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_display 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) @@ -7897,6 +7904,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( #endif +// VK_KHR_display_swapchain is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_display_swapchain 1 #define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10 #define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" @@ -7920,11 +7928,13 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( #endif +// VK_KHR_sampler_mirror_clamp_to_edge is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_sampler_mirror_clamp_to_edge 1 #define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3 #define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" +// VK_KHR_video_queue is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_video_queue 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR) @@ -8220,6 +8230,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR( #endif +// VK_KHR_video_decode_queue is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_video_decode_queue 1 #define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 7 #define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue" @@ -8274,6 +8285,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR( #endif +// VK_KHR_video_decode_h264 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_video_decode_h264 1 #include "vk_video/vulkan_video_codec_h264std.h" #include "vk_video/vulkan_video_codec_h264std_decode.h" @@ -8334,6 +8346,7 @@ typedef struct VkVideoDecodeH264DpbSlotInfoKHR { +// VK_KHR_dynamic_rendering is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_dynamic_rendering 1 #define VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION 1 #define VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME "VK_KHR_dynamic_rendering" @@ -8396,6 +8409,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderingKHR( #endif +// VK_KHR_multiview is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_multiview 1 #define VK_KHR_MULTIVIEW_SPEC_VERSION 1 #define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" @@ -8407,6 +8421,7 @@ typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesK +// VK_KHR_get_physical_device_properties2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_get_physical_device_properties2 1 #define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2 #define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" @@ -8472,6 +8487,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( #endif +// VK_KHR_device_group is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_device_group 1 #define VK_KHR_DEVICE_GROUP_SPEC_VERSION 4 #define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" @@ -8524,11 +8540,13 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( #endif +// VK_KHR_shader_draw_parameters is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_shader_draw_parameters 1 #define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 #define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" +// VK_KHR_maintenance1 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_maintenance1 1 #define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2 #define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1" @@ -8546,6 +8564,7 @@ VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( #endif +// VK_KHR_device_group_creation is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_device_group_creation 1 #define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 #define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" @@ -8564,6 +8583,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( #endif +// VK_KHR_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_memory_capabilities 1 #define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" @@ -8598,6 +8618,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( #endif +// VK_KHR_external_memory is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_memory 1 #define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" @@ -8610,6 +8631,7 @@ typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; +// VK_KHR_external_memory_fd is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_memory_fd 1 #define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" @@ -8650,6 +8672,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( #endif +// VK_KHR_external_semaphore_capabilities is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_semaphore_capabilities 1 #define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" @@ -8675,6 +8698,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( #endif +// VK_KHR_external_semaphore is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_semaphore 1 #define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" @@ -8686,6 +8710,7 @@ typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; +// VK_KHR_external_semaphore_fd is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_semaphore_fd 1 #define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" @@ -8720,6 +8745,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( #endif +// VK_KHR_push_descriptor is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_push_descriptor 1 #define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 #define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" @@ -8750,6 +8776,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( #endif +// VK_KHR_shader_float16_int8 is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_shader_float16_int8 1 #define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1 #define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8" @@ -8759,6 +8786,7 @@ typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8Fea +// VK_KHR_16bit_storage is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_16bit_storage 1 #define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 #define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" @@ -8766,6 +8794,7 @@ typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeature +// VK_KHR_incremental_present is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_incremental_present 1 #define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2 #define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" @@ -8789,6 +8818,7 @@ typedef struct VkPresentRegionsKHR { +// VK_KHR_descriptor_update_template is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_descriptor_update_template 1 typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; @@ -8826,6 +8856,7 @@ VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( #endif +// VK_KHR_imageless_framebuffer is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_imageless_framebuffer 1 #define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1 #define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer" @@ -8839,6 +8870,7 @@ typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR; +// VK_KHR_create_renderpass2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_create_renderpass2 1 #define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 #define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" @@ -8884,6 +8916,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( #endif +// VK_KHR_shared_presentable_image is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_shared_presentable_image 1 #define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 #define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" @@ -8902,6 +8935,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( #endif +// VK_KHR_external_fence_capabilities is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_fence_capabilities 1 #define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" @@ -8927,6 +8961,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( #endif +// VK_KHR_external_fence is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_fence 1 #define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" @@ -8938,6 +8973,7 @@ typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; +// VK_KHR_external_fence_fd is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_fence_fd 1 #define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" @@ -8972,6 +9008,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( #endif +// VK_KHR_performance_query is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_performance_query 1 #define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1 #define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query" @@ -9112,6 +9149,7 @@ VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( #endif +// VK_KHR_maintenance2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_maintenance2 1 #define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1 #define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2" @@ -9133,6 +9171,7 @@ typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellation +// VK_KHR_get_surface_capabilities2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_get_surface_capabilities2 1 #define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 #define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" @@ -9171,6 +9210,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( #endif +// VK_KHR_variable_pointers is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_variable_pointers 1 #define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 #define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" @@ -9180,6 +9220,7 @@ typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointer +// VK_KHR_get_display_properties2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_get_display_properties2 1 #define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 #define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" @@ -9243,6 +9284,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( #endif +// VK_KHR_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_dedicated_allocation 1 #define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 #define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" @@ -9252,16 +9294,19 @@ typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; +// VK_KHR_storage_buffer_storage_class is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_storage_buffer_storage_class 1 #define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 #define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" +// VK_KHR_relaxed_block_layout is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_relaxed_block_layout 1 #define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 #define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" +// VK_KHR_get_memory_requirements2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_get_memory_requirements2 1 #define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 #define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" @@ -9298,6 +9343,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( #endif +// VK_KHR_image_format_list is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_image_format_list 1 #define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 #define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" @@ -9305,6 +9351,7 @@ typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR; +// VK_KHR_sampler_ycbcr_conversion is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_sampler_ycbcr_conversion 1 typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; @@ -9345,6 +9392,7 @@ VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( #endif +// VK_KHR_bind_memory2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_bind_memory2 1 #define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 #define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" @@ -9368,6 +9416,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( #endif +// VK_KHR_maintenance3 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_maintenance3 1 #define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1 #define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3" @@ -9387,6 +9436,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( #endif +// VK_KHR_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_draw_indirect_count 1 #define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 #define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" @@ -9414,6 +9464,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( #endif +// VK_KHR_shader_subgroup_extended_types is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_shader_subgroup_extended_types 1 #define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1 #define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types" @@ -9421,6 +9472,7 @@ typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShad +// VK_KHR_8bit_storage is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_8bit_storage 1 #define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 #define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" @@ -9428,6 +9480,7 @@ typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesK +// VK_KHR_shader_atomic_int64 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_shader_atomic_int64 1 #define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 #define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" @@ -9435,6 +9488,7 @@ typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicIn +// VK_KHR_shader_clock is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_shader_clock 1 #define VK_KHR_SHADER_CLOCK_SPEC_VERSION 1 #define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock" @@ -9447,6 +9501,7 @@ typedef struct VkPhysicalDeviceShaderClockFeaturesKHR { +// VK_KHR_video_decode_h265 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_video_decode_h265 1 #include "vk_video/vulkan_video_codec_h265std.h" #include "vk_video/vulkan_video_codec_h265std_decode.h" @@ -9500,6 +9555,7 @@ typedef struct VkVideoDecodeH265DpbSlotInfoKHR { +// VK_KHR_global_priority is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_global_priority 1 #define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR 16U #define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1 @@ -9537,6 +9593,7 @@ typedef struct VkQueueFamilyGlobalPriorityPropertiesKHR { +// VK_KHR_driver_properties is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_driver_properties 1 #define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 #define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" @@ -9550,6 +9607,7 @@ typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR; +// VK_KHR_shader_float_controls is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_shader_float_controls 1 #define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4 #define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls" @@ -9559,6 +9617,7 @@ typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPro +// VK_KHR_depth_stencil_resolve is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_depth_stencil_resolve 1 #define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1 #define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve" @@ -9572,11 +9631,13 @@ typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStenc +// VK_KHR_swapchain_mutable_format is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_swapchain_mutable_format 1 #define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1 #define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format" +// VK_KHR_timeline_semaphore is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_timeline_semaphore 1 #define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2 #define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore" @@ -9619,6 +9680,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR( #endif +// VK_KHR_vulkan_memory_model is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_vulkan_memory_model 1 #define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3 #define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" @@ -9626,6 +9688,7 @@ typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryMo +// VK_KHR_shader_terminate_invocation is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_shader_terminate_invocation 1 #define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1 #define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation" @@ -9633,6 +9696,7 @@ typedef VkPhysicalDeviceShaderTerminateInvocationFeatures VkPhysicalDeviceShader +// VK_KHR_fragment_shading_rate is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_fragment_shading_rate 1 #define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 2 #define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate" @@ -9712,11 +9776,13 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR( #endif +// VK_KHR_spirv_1_4 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_spirv_1_4 1 #define VK_KHR_SPIRV_1_4_SPEC_VERSION 1 #define VK_KHR_SPIRV_1_4_EXTENSION_NAME "VK_KHR_spirv_1_4" +// VK_KHR_surface_protected_capabilities is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_surface_protected_capabilities 1 #define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1 #define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities" @@ -9728,6 +9794,7 @@ typedef struct VkSurfaceProtectedCapabilitiesKHR { +// VK_KHR_separate_depth_stencil_layouts is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_separate_depth_stencil_layouts 1 #define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1 #define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts" @@ -9739,6 +9806,7 @@ typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayou +// VK_KHR_present_wait is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_present_wait 1 #define VK_KHR_PRESENT_WAIT_SPEC_VERSION 1 #define VK_KHR_PRESENT_WAIT_EXTENSION_NAME "VK_KHR_present_wait" @@ -9759,6 +9827,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresentKHR( #endif +// VK_KHR_uniform_buffer_standard_layout is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_uniform_buffer_standard_layout 1 #define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1 #define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout" @@ -9766,6 +9835,7 @@ typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUnif +// VK_KHR_buffer_device_address is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_buffer_device_address 1 #define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1 #define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address" @@ -9798,6 +9868,7 @@ VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR( #endif +// VK_KHR_deferred_host_operations is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_deferred_host_operations 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) #define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4 @@ -9833,6 +9904,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( #endif +// VK_KHR_pipeline_executable_properties is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_pipeline_executable_properties 1 #define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 #define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" @@ -9923,6 +9995,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR #endif +// VK_KHR_map_memory2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_map_memory2 1 #define VK_KHR_MAP_MEMORY_2_SPEC_VERSION 1 #define VK_KHR_MAP_MEMORY_2_EXTENSION_NAME "VK_KHR_map_memory2" @@ -9958,6 +10031,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkUnmapMemory2KHR( #endif +// VK_KHR_shader_integer_dot_product is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_shader_integer_dot_product 1 #define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION 1 #define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME "VK_KHR_shader_integer_dot_product" @@ -9967,6 +10041,7 @@ typedef VkPhysicalDeviceShaderIntegerDotProductProperties VkPhysicalDeviceShader +// VK_KHR_pipeline_library is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_pipeline_library 1 #define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 #define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" @@ -9979,11 +10054,13 @@ typedef struct VkPipelineLibraryCreateInfoKHR { +// VK_KHR_shader_non_semantic_info is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_shader_non_semantic_info 1 #define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 #define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" +// VK_KHR_present_id is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_present_id 1 #define VK_KHR_PRESENT_ID_SPEC_VERSION 1 #define VK_KHR_PRESENT_ID_EXTENSION_NAME "VK_KHR_present_id" @@ -10002,6 +10079,7 @@ typedef struct VkPhysicalDevicePresentIdFeaturesKHR { +// VK_KHR_synchronization2 is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_synchronization2 1 #define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1 #define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2" @@ -10102,6 +10180,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( #endif +// VK_KHR_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_fragment_shader_barycentric 1 #define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 #define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_KHR_fragment_shader_barycentric" @@ -10119,6 +10198,7 @@ typedef struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR { +// VK_KHR_shader_subgroup_uniform_control_flow is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_shader_subgroup_uniform_control_flow 1 #define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1 #define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow" @@ -10130,6 +10210,7 @@ typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { +// VK_KHR_zero_initialize_workgroup_memory is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_zero_initialize_workgroup_memory 1 #define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1 #define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory" @@ -10137,6 +10218,7 @@ typedef VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures VkPhysicalDeviceZe +// VK_KHR_workgroup_memory_explicit_layout is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_workgroup_memory_explicit_layout 1 #define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1 #define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout" @@ -10151,6 +10233,7 @@ typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { +// VK_KHR_copy_commands2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_copy_commands2 1 #define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 #define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" @@ -10210,6 +10293,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( #endif +// VK_KHR_format_feature_flags2 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_format_feature_flags2 1 #define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 2 #define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2" @@ -10221,6 +10305,7 @@ typedef VkFormatProperties3 VkFormatProperties3KHR; +// VK_KHR_ray_tracing_maintenance1 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_ray_tracing_maintenance1 1 #define VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION 1 #define VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_ray_tracing_maintenance1" @@ -10257,11 +10342,13 @@ VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirect2KHR( #endif +// VK_KHR_portability_enumeration is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_portability_enumeration 1 #define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1 #define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration" +// VK_KHR_maintenance4 is a preprocessor guard. Do not pass it to API calls. 
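Since the guard macros must never be handed to the API, the usual way to confirm at run time that a device actually implements one of the extensions declared above is to enumerate its extension properties and compare against the *_EXTENSION_NAME string. A rough sketch under those assumptions (valid VkPhysicalDevice, helper name illustrative only), using VK_KHR_synchronization2 as the example:

#include <stdlib.h>
#include <string.h>
#include <vulkan/vulkan.h>

/* Returns 1 if the device advertises VK_KHR_synchronization2, 0 otherwise.
 * Sketch only: error handling is minimal. */
static int device_supports_synchronization2(VkPhysicalDevice physicalDevice)
{
    uint32_t count = 0;
    if (vkEnumerateDeviceExtensionProperties(physicalDevice, NULL, &count, NULL) != VK_SUCCESS)
        return 0;

    VkExtensionProperties *props = calloc(count, sizeof *props);
    if (!props)
        return 0;

    int found = 0;
    if (vkEnumerateDeviceExtensionProperties(physicalDevice, NULL, &count, props) == VK_SUCCESS) {
        for (uint32_t i = 0; i < count; ++i) {
            /* Compare against the *_EXTENSION_NAME string, not the guard macro. */
            if (strcmp(props[i].extensionName, VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME) == 0) {
                found = 1;
                break;
            }
        }
    }
    free(props);
    return found;
}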
#define VK_KHR_maintenance4 1 #define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2 #define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4" @@ -10296,6 +10383,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR( #endif +// VK_KHR_ray_tracing_position_fetch is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_ray_tracing_position_fetch 1 #define VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION 1 #define VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME "VK_KHR_ray_tracing_position_fetch" @@ -10307,6 +10395,7 @@ typedef struct VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR { +// VK_KHR_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_cooperative_matrix 1 #define VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION 2 #define VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_KHR_cooperative_matrix" @@ -10323,6 +10412,17 @@ typedef enum VkComponentTypeKHR { VK_COMPONENT_TYPE_UINT16_KHR = 8, VK_COMPONENT_TYPE_UINT32_KHR = 9, VK_COMPONENT_TYPE_UINT64_KHR = 10, + VK_COMPONENT_TYPE_FLOAT16_NV = VK_COMPONENT_TYPE_FLOAT16_KHR, + VK_COMPONENT_TYPE_FLOAT32_NV = VK_COMPONENT_TYPE_FLOAT32_KHR, + VK_COMPONENT_TYPE_FLOAT64_NV = VK_COMPONENT_TYPE_FLOAT64_KHR, + VK_COMPONENT_TYPE_SINT8_NV = VK_COMPONENT_TYPE_SINT8_KHR, + VK_COMPONENT_TYPE_SINT16_NV = VK_COMPONENT_TYPE_SINT16_KHR, + VK_COMPONENT_TYPE_SINT32_NV = VK_COMPONENT_TYPE_SINT32_KHR, + VK_COMPONENT_TYPE_SINT64_NV = VK_COMPONENT_TYPE_SINT64_KHR, + VK_COMPONENT_TYPE_UINT8_NV = VK_COMPONENT_TYPE_UINT8_KHR, + VK_COMPONENT_TYPE_UINT16_NV = VK_COMPONENT_TYPE_UINT16_KHR, + VK_COMPONENT_TYPE_UINT32_NV = VK_COMPONENT_TYPE_UINT32_KHR, + VK_COMPONENT_TYPE_UINT64_NV = VK_COMPONENT_TYPE_UINT64_KHR, VK_COMPONENT_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF } VkComponentTypeKHR; @@ -10331,6 +10431,10 @@ typedef enum VkScopeKHR { VK_SCOPE_WORKGROUP_KHR = 2, VK_SCOPE_SUBGROUP_KHR = 3, VK_SCOPE_QUEUE_FAMILY_KHR = 5, + VK_SCOPE_DEVICE_NV = VK_SCOPE_DEVICE_KHR, + VK_SCOPE_WORKGROUP_NV = VK_SCOPE_WORKGROUP_KHR, + VK_SCOPE_SUBGROUP_NV = VK_SCOPE_SUBGROUP_KHR, + VK_SCOPE_QUEUE_FAMILY_NV = VK_SCOPE_QUEUE_FAMILY_KHR, VK_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF } VkScopeKHR; typedef struct VkCooperativeMatrixPropertiesKHR { @@ -10370,6 +10474,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR #endif +// VK_EXT_debug_report is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_debug_report 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) #define VK_EXT_DEBUG_REPORT_SPEC_VERSION 10 @@ -10477,21 +10582,25 @@ VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( #endif +// VK_NV_glsl_shader is a preprocessor guard. Do not pass it to API calls. #define VK_NV_glsl_shader 1 #define VK_NV_GLSL_SHADER_SPEC_VERSION 1 #define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" +// VK_EXT_depth_range_unrestricted is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_depth_range_unrestricted 1 #define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 #define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" +// VK_IMG_filter_cubic is a preprocessor guard. Do not pass it to API calls. #define VK_IMG_filter_cubic 1 #define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 #define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" +// VK_AMD_rasterization_order is a preprocessor guard. Do not pass it to API calls. 
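The NV-suffixed enumerants that the hunks above add to VkComponentTypeKHR and VkScopeKHR are pure aliases of the KHR values, so the two naming schemes can be mixed freely and compared directly. A small sketch illustrating this (the helper name is illustrative only):

#include <vulkan/vulkan.h>

/* The NV-suffixed names are defined above as aliases of the KHR values, so an
 * equality check between the two schemes is guaranteed to hold. */
_Static_assert(VK_COMPONENT_TYPE_FLOAT16_NV == VK_COMPONENT_TYPE_FLOAT16_KHR,
               "NV and KHR component types share values");
_Static_assert(VK_SCOPE_SUBGROUP_NV == VK_SCOPE_SUBGROUP_KHR,
               "NV and KHR scopes share values");

/* Illustrative helper: classify a component type coming from either naming
 * scheme; a single switch over the KHR enumerants covers both. */
static int component_type_is_float(VkComponentTypeKHR type)
{
    switch (type) {
        case VK_COMPONENT_TYPE_FLOAT16_KHR:   /* also matches VK_COMPONENT_TYPE_FLOAT16_NV */
        case VK_COMPONENT_TYPE_FLOAT32_KHR:
        case VK_COMPONENT_TYPE_FLOAT64_KHR:
            return 1;
        default:
            return 0;
    }
}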
#define VK_AMD_rasterization_order 1 #define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 #define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" @@ -10509,16 +10618,19 @@ typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { +// VK_AMD_shader_trinary_minmax is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_shader_trinary_minmax 1 #define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 #define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" +// VK_AMD_shader_explicit_vertex_parameter is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_shader_explicit_vertex_parameter 1 #define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 #define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" +// VK_EXT_debug_marker is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_debug_marker 1 #define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 #define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" @@ -10575,11 +10687,13 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( #endif +// VK_AMD_gcn_shader is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_gcn_shader 1 #define VK_AMD_GCN_SHADER_SPEC_VERSION 1 #define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" +// VK_NV_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. #define VK_NV_dedicated_allocation 1 #define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 #define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" @@ -10604,6 +10718,7 @@ typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { +// VK_EXT_transform_feedback is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_transform_feedback 1 #define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 #define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" @@ -10691,6 +10806,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( #endif +// VK_NVX_binary_import is a preprocessor guard. Do not pass it to API calls. #define VK_NVX_binary_import 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) @@ -10762,6 +10878,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( #endif +// VK_NVX_image_view_handle is a preprocessor guard. Do not pass it to API calls. #define VK_NVX_image_view_handle 1 #define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2 #define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" @@ -10795,6 +10912,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( #endif +// VK_AMD_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_draw_indirect_count 1 #define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2 #define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" @@ -10822,21 +10940,25 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( #endif +// VK_AMD_negative_viewport_height is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_negative_viewport_height 1 #define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 #define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" +// VK_AMD_gpu_shader_half_float is a preprocessor guard. Do not pass it to API calls. 
#define VK_AMD_gpu_shader_half_float 1 #define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2 #define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" +// VK_AMD_shader_ballot is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_shader_ballot 1 #define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 #define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" +// VK_AMD_texture_gather_bias_lod is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_texture_gather_bias_lod 1 #define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 #define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" @@ -10848,6 +10970,7 @@ typedef struct VkTextureLODGatherFormatPropertiesAMD { +// VK_AMD_shader_info is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_shader_info 1 #define VK_AMD_SHADER_INFO_SPEC_VERSION 1 #define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" @@ -10889,11 +11012,13 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( #endif +// VK_AMD_shader_image_load_store_lod is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_shader_image_load_store_lod 1 #define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 #define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" +// VK_NV_corner_sampled_image is a preprocessor guard. Do not pass it to API calls. #define VK_NV_corner_sampled_image 1 #define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 #define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" @@ -10905,11 +11030,13 @@ typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { +// VK_IMG_format_pvrtc is a preprocessor guard. Do not pass it to API calls. #define VK_IMG_format_pvrtc 1 #define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 #define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" +// VK_NV_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. #define VK_NV_external_memory_capabilities 1 #define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 #define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" @@ -10952,6 +11079,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesN #endif +// VK_NV_external_memory is a preprocessor guard. Do not pass it to API calls. #define VK_NV_external_memory 1 #define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 #define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" @@ -10969,6 +11097,7 @@ typedef struct VkExportMemoryAllocateInfoNV { +// VK_EXT_validation_flags is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_validation_flags 1 #define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 2 #define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" @@ -10987,16 +11116,19 @@ typedef struct VkValidationFlagsEXT { +// VK_EXT_shader_subgroup_ballot is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_subgroup_ballot 1 #define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 #define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" +// VK_EXT_shader_subgroup_vote is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_subgroup_vote 1 #define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 #define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" +// VK_EXT_texture_compression_astc_hdr is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_texture_compression_astc_hdr 1 #define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1 #define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr" @@ -11004,6 +11136,7 @@ typedef VkPhysicalDeviceTextureCompressionASTCHDRFeatures VkPhysicalDeviceTextur +// VK_EXT_astc_decode_mode is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_astc_decode_mode 1 #define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 #define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" @@ -11021,6 +11154,7 @@ typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { +// VK_EXT_pipeline_robustness is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_pipeline_robustness 1 #define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1 #define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness" @@ -11066,6 +11200,7 @@ typedef struct VkPipelineRobustnessCreateInfoEXT { +// VK_EXT_conditional_rendering is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_conditional_rendering 1 #define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2 #define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering" @@ -11109,6 +11244,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( #endif +// VK_NV_clip_space_w_scaling is a preprocessor guard. Do not pass it to API calls. #define VK_NV_clip_space_w_scaling 1 #define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 #define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" @@ -11136,6 +11272,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( #endif +// VK_EXT_direct_mode_display is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_direct_mode_display 1 #define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 #define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" @@ -11148,6 +11285,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( #endif +// VK_EXT_display_surface_counter is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_display_surface_counter 1 #define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 #define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" @@ -11184,6 +11322,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( #endif +// VK_EXT_display_control is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_display_control 1 #define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 #define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" @@ -11260,6 +11399,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( #endif +// VK_GOOGLE_display_timing is a preprocessor guard. Do not pass it to API calls. #define VK_GOOGLE_display_timing 1 #define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 #define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" @@ -11304,16 +11444,19 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( #endif +// VK_NV_sample_mask_override_coverage is a preprocessor guard. Do not pass it to API calls. #define VK_NV_sample_mask_override_coverage 1 #define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 #define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" +// VK_NV_geometry_shader_passthrough is a preprocessor guard. Do not pass it to API calls. 
#define VK_NV_geometry_shader_passthrough 1 #define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 #define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" +// VK_NV_viewport_array2 is a preprocessor guard. Do not pass it to API calls. #define VK_NV_viewport_array2 1 #define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1 #define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2" @@ -11321,6 +11464,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( #define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME +// VK_NVX_multiview_per_view_attributes is a preprocessor guard. Do not pass it to API calls. #define VK_NVX_multiview_per_view_attributes 1 #define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 #define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" @@ -11332,6 +11476,7 @@ typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { +// VK_NV_viewport_swizzle is a preprocessor guard. Do not pass it to API calls. #define VK_NV_viewport_swizzle 1 #define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 #define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" @@ -11365,6 +11510,7 @@ typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { +// VK_EXT_discard_rectangles is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_discard_rectangles 1 #define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 2 #define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" @@ -11411,6 +11557,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleModeEXT( #endif +// VK_EXT_conservative_rasterization is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_conservative_rasterization 1 #define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 #define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" @@ -11446,6 +11593,7 @@ typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { +// VK_EXT_depth_clip_enable is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_depth_clip_enable 1 #define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1 #define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable" @@ -11465,11 +11613,13 @@ typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { +// VK_EXT_swapchain_colorspace is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_swapchain_colorspace 1 #define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4 #define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" +// VK_EXT_hdr_metadata is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_hdr_metadata 1 #define VK_EXT_HDR_METADATA_SPEC_VERSION 2 #define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" @@ -11502,17 +11652,20 @@ VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( #endif +// VK_EXT_external_memory_dma_buf is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_external_memory_dma_buf 1 #define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 #define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" +// VK_EXT_queue_family_foreign is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_queue_family_foreign 1 #define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 #define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" #define VK_QUEUE_FAMILY_FOREIGN_EXT (~2U) +// VK_EXT_debug_utils is a preprocessor guard. 
Do not pass it to API calls. #define VK_EXT_debug_utils 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) #define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2 @@ -11655,6 +11808,7 @@ VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( #endif +// VK_EXT_sampler_filter_minmax is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_sampler_filter_minmax 1 #define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2 #define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" @@ -11666,21 +11820,25 @@ typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFil +// VK_AMD_gpu_shader_int16 is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_gpu_shader_int16 1 #define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2 #define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" +// VK_AMD_mixed_attachment_samples is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_mixed_attachment_samples 1 #define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 #define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" +// VK_AMD_shader_fragment_mask is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_shader_fragment_mask 1 #define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 #define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" +// VK_EXT_inline_uniform_block is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_inline_uniform_block 1 #define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 #define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" @@ -11694,11 +11852,13 @@ typedef VkDescriptorPoolInlineUniformBlockCreateInfo VkDescriptorPoolInlineUnifo +// VK_EXT_shader_stencil_export is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_stencil_export 1 #define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 #define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" +// VK_EXT_sample_locations is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_sample_locations 1 #define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 #define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" @@ -11773,6 +11933,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( #endif +// VK_EXT_blend_operation_advanced is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_blend_operation_advanced 1 #define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 #define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" @@ -11810,6 +11971,7 @@ typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { +// VK_NV_fragment_coverage_to_color is a preprocessor guard. Do not pass it to API calls. #define VK_NV_fragment_coverage_to_color 1 #define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 #define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" @@ -11824,6 +11986,7 @@ typedef struct VkPipelineCoverageToColorStateCreateInfoNV { +// VK_NV_framebuffer_mixed_samples is a preprocessor guard. Do not pass it to API calls. #define VK_NV_framebuffer_mixed_samples 1 #define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 #define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" @@ -11848,11 +12011,13 @@ typedef struct VkPipelineCoverageModulationStateCreateInfoNV { +// VK_NV_fill_rectangle is a preprocessor guard. Do not pass it to API calls. 
#define VK_NV_fill_rectangle 1 #define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 #define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" +// VK_NV_shader_sm_builtins is a preprocessor guard. Do not pass it to API calls. #define VK_NV_shader_sm_builtins 1 #define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1 #define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins" @@ -11871,11 +12036,13 @@ typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV { +// VK_EXT_post_depth_coverage is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_post_depth_coverage 1 #define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 #define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" +// VK_EXT_image_drm_format_modifier is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_image_drm_format_modifier 1 #define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 2 #define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" @@ -11945,6 +12112,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( #endif +// VK_EXT_validation_cache is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_validation_cache 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) #define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 @@ -12000,6 +12168,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( #endif +// VK_EXT_descriptor_indexing is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_descriptor_indexing 1 #define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 #define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" @@ -12019,11 +12188,13 @@ typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVaria +// VK_EXT_shader_viewport_index_layer is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_viewport_index_layer 1 #define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 #define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" +// VK_NV_shading_rate_image is a preprocessor guard. Do not pass it to API calls. #define VK_NV_shading_rate_image 1 #define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 #define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" @@ -12124,6 +12295,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( #endif +// VK_NV_ray_tracing is a preprocessor guard. Do not pass it to API calls. #define VK_NV_ray_tracing 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) #define VK_NV_RAY_TRACING_SPEC_VERSION 3 @@ -12502,6 +12674,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV( #endif +// VK_NV_representative_fragment_test is a preprocessor guard. Do not pass it to API calls. #define VK_NV_representative_fragment_test 1 #define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2 #define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" @@ -12519,6 +12692,7 @@ typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { +// VK_EXT_filter_cubic is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_filter_cubic 1 #define VK_EXT_FILTER_CUBIC_SPEC_VERSION 3 #define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic" @@ -12537,11 +12711,13 @@ typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { +// VK_QCOM_render_pass_shader_resolve is a preprocessor guard. Do not pass it to API calls. 
#define VK_QCOM_render_pass_shader_resolve 1 #define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4 #define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" +// VK_EXT_global_priority is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_global_priority 1 #define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 #define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" @@ -12551,6 +12727,7 @@ typedef VkDeviceQueueGlobalPriorityCreateInfoKHR VkDeviceQueueGlobalPriorityCrea +// VK_EXT_external_memory_host is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_external_memory_host 1 #define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 #define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" @@ -12584,6 +12761,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( #endif +// VK_AMD_buffer_marker is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_buffer_marker 1 #define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 #define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" @@ -12599,6 +12777,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( #endif +// VK_AMD_pipeline_compiler_control is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_pipeline_compiler_control 1 #define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1 #define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control" @@ -12615,6 +12794,7 @@ typedef struct VkPipelineCompilerControlCreateInfoAMD { +// VK_EXT_calibrated_timestamps is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_calibrated_timestamps 1 #define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2 #define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" @@ -12650,6 +12830,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( #endif +// VK_AMD_shader_core_properties is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_shader_core_properties 1 #define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2 #define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" @@ -12674,6 +12855,7 @@ typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { +// VK_AMD_memory_overallocation_behavior is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_memory_overallocation_behavior 1 #define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 #define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" @@ -12692,6 +12874,7 @@ typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { +// VK_EXT_vertex_attribute_divisor is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_vertex_attribute_divisor 1 #define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 #define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" @@ -12722,6 +12905,7 @@ typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT { +// VK_EXT_pipeline_creation_feedback is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_pipeline_creation_feedback 1 #define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1 #define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback" @@ -12735,11 +12919,13 @@ typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT; +// VK_NV_shader_subgroup_partitioned is a preprocessor guard. Do not pass it to API calls. 
#define VK_NV_shader_subgroup_partitioned 1 #define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 #define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" +// VK_NV_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. #define VK_NV_compute_shader_derivatives 1 #define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 #define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" @@ -12752,6 +12938,7 @@ typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV { +// VK_NV_mesh_shader is a preprocessor guard. Do not pass it to API calls. #define VK_NV_mesh_shader 1 #define VK_NV_MESH_SHADER_SPEC_VERSION 1 #define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" @@ -12813,6 +13000,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( #endif +// VK_NV_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. #define VK_NV_fragment_shader_barycentric 1 #define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 #define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" @@ -12820,6 +13008,7 @@ typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR VkPhysicalDeviceFra +// VK_NV_shader_image_footprint is a preprocessor guard. Do not pass it to API calls. #define VK_NV_shader_image_footprint 1 #define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2 #define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint" @@ -12831,6 +13020,7 @@ typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { +// VK_NV_scissor_exclusive is a preprocessor guard. Do not pass it to API calls. #define VK_NV_scissor_exclusive 1 #define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 2 #define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" @@ -12865,6 +13055,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( #endif +// VK_NV_device_diagnostic_checkpoints is a preprocessor guard. Do not pass it to API calls. #define VK_NV_device_diagnostic_checkpoints 1 #define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 #define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" @@ -12896,6 +13087,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( #endif +// VK_INTEL_shader_integer_functions2 is a preprocessor guard. Do not pass it to API calls. #define VK_INTEL_shader_integer_functions2 1 #define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1 #define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2" @@ -12907,6 +13099,7 @@ typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { +// VK_INTEL_performance_query is a preprocessor guard. Do not pass it to API calls. #define VK_INTEL_performance_query 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) #define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 @@ -13045,6 +13238,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL( #endif +// VK_EXT_pci_bus_info is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_pci_bus_info 1 #define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 2 #define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" @@ -13059,6 +13253,7 @@ typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { +// VK_AMD_display_native_hdr is a preprocessor guard. Do not pass it to API calls. 
#define VK_AMD_display_native_hdr 1 #define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1 #define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr" @@ -13084,6 +13279,7 @@ VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( #endif +// VK_EXT_fragment_density_map is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_fragment_density_map 1 #define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2 #define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" @@ -13111,6 +13307,7 @@ typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { +// VK_EXT_scalar_block_layout is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_scalar_block_layout 1 #define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1 #define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout" @@ -13118,6 +13315,7 @@ typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLay +// VK_GOOGLE_hlsl_functionality1 is a preprocessor guard. Do not pass it to API calls. #define VK_GOOGLE_hlsl_functionality1 1 #define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1 #define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" @@ -13125,11 +13323,13 @@ typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLay #define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME +// VK_GOOGLE_decorate_string is a preprocessor guard. Do not pass it to API calls. #define VK_GOOGLE_decorate_string 1 #define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1 #define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" +// VK_EXT_subgroup_size_control is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_subgroup_size_control 1 #define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2 #define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control" @@ -13141,6 +13341,7 @@ typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkPipelineShaderStag +// VK_AMD_shader_core_properties2 is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_shader_core_properties2 1 #define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1 #define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2" @@ -13158,6 +13359,7 @@ typedef struct VkPhysicalDeviceShaderCoreProperties2AMD { +// VK_AMD_device_coherent_memory is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_device_coherent_memory 1 #define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1 #define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory" @@ -13169,6 +13371,7 @@ typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { +// VK_EXT_shader_image_atomic_int64 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_image_atomic_int64 1 #define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 #define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" @@ -13181,6 +13384,7 @@ typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { +// VK_EXT_memory_budget is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_memory_budget 1 #define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 #define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" @@ -13193,6 +13397,7 @@ typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT { +// VK_EXT_memory_priority is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_memory_priority 1 #define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1 #define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority" @@ -13210,6 +13415,7 @@ typedef struct VkMemoryPriorityAllocateInfoEXT { +// VK_NV_dedicated_allocation_image_aliasing is a preprocessor guard. Do not pass it to API calls. #define VK_NV_dedicated_allocation_image_aliasing 1 #define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1 #define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing" @@ -13221,6 +13427,7 @@ typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { +// VK_EXT_buffer_device_address is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_buffer_device_address 1 #define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2 #define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address" @@ -13251,6 +13458,7 @@ VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT( #endif +// VK_EXT_tooling_info is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_tooling_info 1 #define VK_EXT_TOOLING_INFO_SPEC_VERSION 1 #define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info" @@ -13270,6 +13478,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT( #endif +// VK_EXT_separate_stencil_usage is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_separate_stencil_usage 1 #define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1 #define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage" @@ -13277,6 +13486,7 @@ typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; +// VK_EXT_validation_features is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_validation_features 1 #define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 5 #define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" @@ -13312,32 +13522,14 @@ typedef struct VkValidationFeaturesEXT { +// VK_NV_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. #define VK_NV_cooperative_matrix 1 #define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1 #define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix" +typedef VkComponentTypeKHR VkComponentTypeNV; -typedef enum VkComponentTypeNV { - VK_COMPONENT_TYPE_FLOAT16_NV = 0, - VK_COMPONENT_TYPE_FLOAT32_NV = 1, - VK_COMPONENT_TYPE_FLOAT64_NV = 2, - VK_COMPONENT_TYPE_SINT8_NV = 3, - VK_COMPONENT_TYPE_SINT16_NV = 4, - VK_COMPONENT_TYPE_SINT32_NV = 5, - VK_COMPONENT_TYPE_SINT64_NV = 6, - VK_COMPONENT_TYPE_UINT8_NV = 7, - VK_COMPONENT_TYPE_UINT16_NV = 8, - VK_COMPONENT_TYPE_UINT32_NV = 9, - VK_COMPONENT_TYPE_UINT64_NV = 10, - VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkComponentTypeNV; +typedef VkScopeKHR VkScopeNV; -typedef enum VkScopeNV { - VK_SCOPE_DEVICE_NV = 1, - VK_SCOPE_WORKGROUP_NV = 2, - VK_SCOPE_SUBGROUP_NV = 3, - VK_SCOPE_QUEUE_FAMILY_NV = 5, - VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF -} VkScopeNV; typedef struct VkCooperativeMatrixPropertiesNV { VkStructureType sType; void* pNext; @@ -13374,6 +13566,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( #endif +// VK_NV_coverage_reduction_mode is a preprocessor guard. Do not pass it to API calls. 
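Replacing the standalone VkComponentTypeNV and VkScopeNV enums with typedefs of their KHR counterparts, as the hunk above does, keeps source compatibility for code written against VK_NV_cooperative_matrix: the NV type names and enumerants still exist, but they now refer to the promoted KHR definitions. A short sketch with a hypothetical legacy helper:

#include <vulkan/vulkan.h>

/* Hypothetical helper written against VK_NV_cooperative_matrix; it still
 * compiles because VkComponentTypeNV is now a typedef of VkComponentTypeKHR
 * and the NV enumerants alias the KHR values. */
static int legacy_is_fp16_component(VkComponentTypeNV type)
{
    return type == VK_COMPONENT_TYPE_FLOAT16_NV;
}

static int new_code_calls_legacy_helper(void)
{
    /* A value produced under the promoted KHR naming scheme is accepted
     * unchanged by the NV-era helper; no cast or translation is needed. */
    VkComponentTypeKHR khrValue = VK_COMPONENT_TYPE_FLOAT16_KHR;
    return legacy_is_fp16_component(khrValue);   /* evaluates to 1 */
}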
#define VK_NV_coverage_reduction_mode 1 #define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1 #define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode" @@ -13416,6 +13609,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSampl #endif +// VK_EXT_fragment_shader_interlock is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_fragment_shader_interlock 1 #define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1 #define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock" @@ -13429,6 +13623,7 @@ typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT { +// VK_EXT_ycbcr_image_arrays is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_ycbcr_image_arrays 1 #define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1 #define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays" @@ -13440,6 +13635,7 @@ typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { +// VK_EXT_provoking_vertex is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_provoking_vertex 1 #define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1 #define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex" @@ -13471,6 +13667,7 @@ typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT { +// VK_EXT_headless_surface is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_headless_surface 1 #define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1 #define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" @@ -13492,6 +13689,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( #endif +// VK_EXT_line_rasterization is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_line_rasterization 1 #define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 #define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" @@ -13539,6 +13737,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( #endif +// VK_EXT_shader_atomic_float is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_atomic_float 1 #define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 #define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" @@ -13561,6 +13760,7 @@ typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { +// VK_EXT_host_query_reset is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_host_query_reset 1 #define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 #define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" @@ -13577,6 +13777,7 @@ VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( #endif +// VK_EXT_index_type_uint8 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_index_type_uint8 1 #define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 #define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" @@ -13588,6 +13789,7 @@ typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT { +// VK_EXT_extended_dynamic_state is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_extended_dynamic_state 1 #define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1 #define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state" @@ -13672,6 +13874,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( #endif +// VK_EXT_shader_atomic_float2 is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_shader_atomic_float2 1 #define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1 #define VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME "VK_EXT_shader_atomic_float2" @@ -13694,6 +13897,7 @@ typedef struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT { +// VK_EXT_surface_maintenance1 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_surface_maintenance1 1 #define VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION 1 #define VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_surface_maintenance1" @@ -13738,6 +13942,7 @@ typedef struct VkSurfacePresentModeCompatibilityEXT { +// VK_EXT_swapchain_maintenance1 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_swapchain_maintenance1 1 #define VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION 1 #define VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_swapchain_maintenance1" @@ -13793,6 +13998,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesEXT( #endif +// VK_EXT_shader_demote_to_helper_invocation is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_demote_to_helper_invocation 1 #define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 #define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" @@ -13800,6 +14006,7 @@ typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures VkPhysicalDeviceS +// VK_NV_device_generated_commands is a preprocessor guard. Do not pass it to API calls. #define VK_NV_device_generated_commands 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) #define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 @@ -13991,6 +14198,7 @@ VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( #endif +// VK_NV_inherited_viewport_scissor is a preprocessor guard. Do not pass it to API calls. #define VK_NV_inherited_viewport_scissor 1 #define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1 #define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor" @@ -14010,6 +14218,7 @@ typedef struct VkCommandBufferInheritanceViewportScissorInfoNV { +// VK_EXT_texel_buffer_alignment is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_texel_buffer_alignment 1 #define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 #define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" @@ -14023,6 +14232,7 @@ typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBuff +// VK_QCOM_render_pass_transform is a preprocessor guard. Do not pass it to API calls. #define VK_QCOM_render_pass_transform 1 #define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 3 #define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" @@ -14041,6 +14251,7 @@ typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { +// VK_EXT_depth_bias_control is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_depth_bias_control 1 #define VK_EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION 1 #define VK_EXT_DEPTH_BIAS_CONTROL_EXTENSION_NAME "VK_EXT_depth_bias_control" @@ -14084,6 +14295,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias2EXT( #endif +// VK_EXT_device_memory_report is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_device_memory_report 1 #define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2 #define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report" @@ -14129,6 +14341,7 @@ typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT { +// VK_EXT_acquire_drm_display is a preprocessor guard. 
Do not pass it to API calls. #define VK_EXT_acquire_drm_display 1 #define VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION 1 #define VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_drm_display" @@ -14149,6 +14362,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetDrmDisplayEXT( #endif +// VK_EXT_robustness2 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_robustness2 1 #define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 #define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" @@ -14169,6 +14383,7 @@ typedef struct VkPhysicalDeviceRobustness2PropertiesEXT { +// VK_EXT_custom_border_color is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_custom_border_color 1 #define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 #define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" @@ -14194,11 +14409,13 @@ typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT { +// VK_GOOGLE_user_type is a preprocessor guard. Do not pass it to API calls. #define VK_GOOGLE_user_type 1 #define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 #define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" +// VK_NV_present_barrier is a preprocessor guard. Do not pass it to API calls. #define VK_NV_present_barrier 1 #define VK_NV_PRESENT_BARRIER_SPEC_VERSION 1 #define VK_NV_PRESENT_BARRIER_EXTENSION_NAME "VK_NV_present_barrier" @@ -14222,6 +14439,7 @@ typedef struct VkSwapchainPresentBarrierCreateInfoNV { +// VK_EXT_private_data is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_private_data 1 typedef VkPrivateDataSlot VkPrivateDataSlotEXT; @@ -14268,6 +14486,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT( #endif +// VK_EXT_pipeline_creation_cache_control is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_pipeline_creation_cache_control 1 #define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3 #define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" @@ -14275,6 +14494,7 @@ typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePip +// VK_NV_device_diagnostics_config is a preprocessor guard. Do not pass it to API calls. #define VK_NV_device_diagnostics_config 1 #define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 2 #define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" @@ -14301,11 +14521,13 @@ typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { +// VK_QCOM_render_pass_store_ops is a preprocessor guard. Do not pass it to API calls. #define VK_QCOM_render_pass_store_ops 1 #define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2 #define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" +// VK_NV_low_latency is a preprocessor guard. Do not pass it to API calls. #define VK_NV_low_latency 1 #define VK_NV_LOW_LATENCY_SPEC_VERSION 1 #define VK_NV_LOW_LATENCY_EXTENSION_NAME "VK_NV_low_latency" @@ -14317,6 +14539,7 @@ typedef struct VkQueryLowLatencySupportNV { +// VK_EXT_descriptor_buffer is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_descriptor_buffer 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) #define VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION 1 @@ -14529,6 +14752,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureOpaqueCaptureDescriptor #endif +// VK_EXT_graphics_pipeline_library is a preprocessor guard. Do not pass it to API calls. 
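The guard comment being added before each block spells out the distinction: a macro such as VK_EXT_custom_border_color only tells the preprocessor that this header declares the extension, while the string the API expects is the corresponding *_EXTENSION_NAME define. A minimal sketch of the intended usage, assuming the physical device and queue setup already exist elsewhere:

#include <vulkan/vulkan.h>

/* Enable VK_EXT_custom_border_color on a new device. The guard macro is used
 * only for conditional compilation; the *_EXTENSION_NAME string is what gets
 * passed to the API. Queue setup is assumed to have been done elsewhere. */
VkDevice createDeviceWithCustomBorderColor(VkPhysicalDevice gpu,
                                           const VkDeviceQueueCreateInfo *queueInfo)
{
    VkDevice device = VK_NULL_HANDLE;
#ifdef VK_EXT_custom_border_color              /* compile-time guard, never an API parameter */
    const char *extensions[] = { VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME };

    VkDeviceCreateInfo createInfo = {};
    createInfo.sType                   = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    createInfo.queueCreateInfoCount    = 1;
    createInfo.pQueueCreateInfos       = queueInfo;
    createInfo.enabledExtensionCount   = 1;
    createInfo.ppEnabledExtensionNames = extensions;

    vkCreateDevice(gpu, &createInfo, NULL, &device);
#endif
    return device;
}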
#define VK_EXT_graphics_pipeline_library 1 #define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION 1 #define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME "VK_EXT_graphics_pipeline_library" @@ -14562,6 +14786,7 @@ typedef struct VkGraphicsPipelineLibraryCreateInfoEXT { +// VK_AMD_shader_early_and_late_fragment_tests is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_shader_early_and_late_fragment_tests 1 #define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION 1 #define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME "VK_AMD_shader_early_and_late_fragment_tests" @@ -14573,6 +14798,7 @@ typedef struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD { +// VK_NV_fragment_shading_rate_enums is a preprocessor guard. Do not pass it to API calls. #define VK_NV_fragment_shading_rate_enums 1 #define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1 #define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums" @@ -14630,6 +14856,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV( #endif +// VK_NV_ray_tracing_motion_blur is a preprocessor guard. Do not pass it to API calls. #define VK_NV_ray_tracing_motion_blur 1 #define VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION 1 #define VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME "VK_NV_ray_tracing_motion_blur" @@ -14720,6 +14947,7 @@ typedef struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV { +// VK_EXT_ycbcr_2plane_444_formats is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_ycbcr_2plane_444_formats 1 #define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1 #define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats" @@ -14731,6 +14959,7 @@ typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { +// VK_EXT_fragment_density_map2 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_fragment_density_map2 1 #define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 #define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" @@ -14751,6 +14980,7 @@ typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { +// VK_QCOM_rotated_copy_commands is a preprocessor guard. Do not pass it to API calls. #define VK_QCOM_rotated_copy_commands 1 #define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 1 #define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands" @@ -14762,6 +14992,7 @@ typedef struct VkCopyCommandTransformInfoQCOM { +// VK_EXT_image_robustness is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_image_robustness 1 #define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 #define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" @@ -14769,6 +15000,7 @@ typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessF +// VK_EXT_image_compression_control is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_image_compression_control 1 #define VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION 1 #define VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME "VK_EXT_image_compression_control" @@ -14855,6 +15087,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( #endif +// VK_EXT_attachment_feedback_loop_layout is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_attachment_feedback_loop_layout 1 #define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION 2 #define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout" @@ -14866,6 +15099,7 @@ typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT { +// VK_EXT_4444_formats is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_4444_formats 1 #define VK_EXT_4444_FORMATS_SPEC_VERSION 1 #define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats" @@ -14878,6 +15112,7 @@ typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { +// VK_EXT_device_fault is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_device_fault 1 #define VK_EXT_DEVICE_FAULT_SPEC_VERSION 2 #define VK_EXT_DEVICE_FAULT_EXTENSION_NAME "VK_EXT_device_fault" @@ -14957,6 +15192,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceFaultInfoEXT( #endif +// VK_ARM_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. #define VK_ARM_rasterization_order_attachment_access 1 #define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 #define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access" @@ -14972,6 +15208,7 @@ typedef VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT VkPhysical +// VK_EXT_rgba10x6_formats is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_rgba10x6_formats 1 #define VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION 1 #define VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME "VK_EXT_rgba10x6_formats" @@ -14983,6 +15220,7 @@ typedef struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT { +// VK_VALVE_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. #define VK_VALVE_mutable_descriptor_type 1 #define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 #define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type" @@ -15012,6 +15250,7 @@ typedef VkMutableDescriptorTypeCreateInfoEXT VkMutableDescriptorTypeCreateInfoVA +// VK_EXT_vertex_input_dynamic_state is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_vertex_input_dynamic_state 1 #define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2 #define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state" @@ -15051,6 +15290,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT( #endif +// VK_EXT_physical_device_drm is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_physical_device_drm 1 #define VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION 1 #define VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME "VK_EXT_physical_device_drm" @@ -15067,6 +15307,7 @@ typedef struct VkPhysicalDeviceDrmPropertiesEXT { +// VK_EXT_device_address_binding_report is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_device_address_binding_report 1 #define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION 1 #define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME "VK_EXT_device_address_binding_report" @@ -15099,6 +15340,7 @@ typedef struct VkDeviceAddressBindingCallbackDataEXT { +// VK_EXT_depth_clip_control is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_depth_clip_control 1 #define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1 #define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control" @@ -15116,6 +15358,7 @@ typedef struct VkPipelineViewportDepthClipControlCreateInfoEXT { +// VK_EXT_primitive_topology_list_restart is a preprocessor guard. 
Do not pass it to API calls. #define VK_EXT_primitive_topology_list_restart 1 #define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION 1 #define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME "VK_EXT_primitive_topology_list_restart" @@ -15128,6 +15371,7 @@ typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { +// VK_HUAWEI_subpass_shading is a preprocessor guard. Do not pass it to API calls. #define VK_HUAWEI_subpass_shading 1 #define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 3 #define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading" @@ -15164,6 +15408,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSubpassShadingHUAWEI( #endif +// VK_HUAWEI_invocation_mask is a preprocessor guard. Do not pass it to API calls. #define VK_HUAWEI_invocation_mask 1 #define VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION 1 #define VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME "VK_HUAWEI_invocation_mask" @@ -15183,6 +15428,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBindInvocationMaskHUAWEI( #endif +// VK_NV_external_memory_rdma is a preprocessor guard. Do not pass it to API calls. #define VK_NV_external_memory_rdma 1 typedef void* VkRemoteAddressNV; #define VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION 1 @@ -15210,6 +15456,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV( #endif +// VK_EXT_pipeline_properties is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_pipeline_properties 1 #define VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION 1 #define VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME "VK_EXT_pipeline_properties" @@ -15237,6 +15484,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelinePropertiesEXT( #endif +// VK_EXT_multisampled_render_to_single_sampled is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_multisampled_render_to_single_sampled 1 #define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION 1 #define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME "VK_EXT_multisampled_render_to_single_sampled" @@ -15261,6 +15509,7 @@ typedef struct VkMultisampledRenderToSingleSampledInfoEXT { +// VK_EXT_extended_dynamic_state2 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_extended_dynamic_state2 1 #define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1 #define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2" @@ -15301,6 +15550,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT( #endif +// VK_EXT_color_write_enable is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_color_write_enable 1 #define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1 #define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable" @@ -15327,6 +15577,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWrite #endif +// VK_EXT_primitives_generated_query is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_primitives_generated_query 1 #define VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION 1 #define VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME "VK_EXT_primitives_generated_query" @@ -15340,6 +15591,7 @@ typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT { +// VK_EXT_global_priority_query is a preprocessor guard. Do not pass it to API calls. 
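The guard only reflects what the installed header can declare; whether the driver actually implements an extension still has to be checked at runtime before its name is passed to vkCreateDevice. A small sketch of the usual two-call enumeration, using VK_EXT_color_write_enable from the block above as the example:

#include <string.h>
#include <vector>
#include <vulkan/vulkan.h>

/* Returns true if the physical device advertises the given extension. */
bool deviceSupportsExtension(VkPhysicalDevice gpu, const char *name)
{
    uint32_t count = 0;
    vkEnumerateDeviceExtensionProperties(gpu, NULL, &count, NULL);

    std::vector<VkExtensionProperties> props(count);
    vkEnumerateDeviceExtensionProperties(gpu, NULL, &count, props.data());

    for (const VkExtensionProperties &p : props)
        if (strcmp(p.extensionName, name) == 0)
            return true;
    return false;
}

/* Usage: deviceSupportsExtension(gpu, VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME) */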
#define VK_EXT_global_priority_query 1 #define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1 #define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query" @@ -15350,6 +15602,7 @@ typedef VkQueueFamilyGlobalPriorityPropertiesKHR VkQueueFamilyGlobalPriorityProp +// VK_EXT_image_view_min_lod is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_image_view_min_lod 1 #define VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION 1 #define VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME "VK_EXT_image_view_min_lod" @@ -15367,6 +15620,7 @@ typedef struct VkImageViewMinLodCreateInfoEXT { +// VK_EXT_multi_draw is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_multi_draw 1 #define VK_EXT_MULTI_DRAW_SPEC_VERSION 1 #define VK_EXT_MULTI_DRAW_EXTENSION_NAME "VK_EXT_multi_draw" @@ -15416,6 +15670,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT( #endif +// VK_EXT_image_2d_view_of_3d is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_image_2d_view_of_3d 1 #define VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION 1 #define VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_2d_view_of_3d" @@ -15428,6 +15683,7 @@ typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT { +// VK_EXT_shader_tile_image is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_tile_image 1 #define VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION 1 #define VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME "VK_EXT_shader_tile_image" @@ -15449,6 +15705,7 @@ typedef struct VkPhysicalDeviceShaderTileImagePropertiesEXT { +// VK_EXT_opacity_micromap is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_opacity_micromap 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT) #define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2 @@ -15720,11 +15977,13 @@ VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT( #endif +// VK_EXT_load_store_op_none is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_load_store_op_none 1 #define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1 #define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none" +// VK_HUAWEI_cluster_culling_shader is a preprocessor guard. Do not pass it to API calls. #define VK_HUAWEI_cluster_culling_shader 1 #define VK_HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION 2 #define VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME "VK_HUAWEI_cluster_culling_shader" @@ -15761,6 +16020,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterIndirectHUAWEI( #endif +// VK_EXT_border_color_swizzle is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_border_color_swizzle 1 #define VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION 1 #define VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME "VK_EXT_border_color_swizzle" @@ -15780,6 +16040,7 @@ typedef struct VkSamplerBorderColorComponentMappingCreateInfoEXT { +// VK_EXT_pageable_device_local_memory is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_pageable_device_local_memory 1 #define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION 1 #define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME "VK_EXT_pageable_device_local_memory" @@ -15799,6 +16060,7 @@ VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT( #endif +// VK_ARM_shader_core_properties is a preprocessor guard. Do not pass it to API calls. 
#define VK_ARM_shader_core_properties 1 #define VK_ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION 1 #define VK_ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_ARM_shader_core_properties" @@ -15812,6 +16074,7 @@ typedef struct VkPhysicalDeviceShaderCorePropertiesARM { +// VK_EXT_image_sliced_view_of_3d is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_image_sliced_view_of_3d 1 #define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION 1 #define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_sliced_view_of_3d" @@ -15831,6 +16094,7 @@ typedef struct VkImageViewSlicedCreateInfoEXT { +// VK_VALVE_descriptor_set_host_mapping is a preprocessor guard. Do not pass it to API calls. #define VK_VALVE_descriptor_set_host_mapping 1 #define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION 1 #define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME "VK_VALVE_descriptor_set_host_mapping" @@ -15870,6 +16134,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetHostMappingVALVE( #endif +// VK_EXT_depth_clamp_zero_one is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_depth_clamp_zero_one 1 #define VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1 #define VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_EXT_depth_clamp_zero_one" @@ -15881,6 +16146,7 @@ typedef struct VkPhysicalDeviceDepthClampZeroOneFeaturesEXT { +// VK_EXT_non_seamless_cube_map is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_non_seamless_cube_map 1 #define VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION 1 #define VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME "VK_EXT_non_seamless_cube_map" @@ -15892,6 +16158,7 @@ typedef struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT { +// VK_QCOM_fragment_density_map_offset is a preprocessor guard. Do not pass it to API calls. #define VK_QCOM_fragment_density_map_offset 1 #define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 1 #define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset" @@ -15916,6 +16183,7 @@ typedef struct VkSubpassFragmentDensityMapOffsetEndInfoQCOM { +// VK_NV_copy_memory_indirect is a preprocessor guard. Do not pass it to API calls. #define VK_NV_copy_memory_indirect 1 #define VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION 1 #define VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME "VK_NV_copy_memory_indirect" @@ -15967,6 +16235,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToImageIndirectNV( #endif +// VK_NV_memory_decompression is a preprocessor guard. Do not pass it to API calls. #define VK_NV_memory_decompression 1 #define VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION 1 #define VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME "VK_NV_memory_decompression" @@ -16014,6 +16283,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV( #endif +// VK_NV_linear_color_attachment is a preprocessor guard. Do not pass it to API calls. #define VK_NV_linear_color_attachment 1 #define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1 #define VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME "VK_NV_linear_color_attachment" @@ -16025,11 +16295,13 @@ typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV { +// VK_GOOGLE_surfaceless_query is a preprocessor guard. Do not pass it to API calls. #define VK_GOOGLE_surfaceless_query 1 #define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 2 #define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query" +// VK_EXT_image_compression_control_swapchain is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_image_compression_control_swapchain 1 #define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION 1 #define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME "VK_EXT_image_compression_control_swapchain" @@ -16041,6 +16313,7 @@ typedef struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT { +// VK_QCOM_image_processing is a preprocessor guard. Do not pass it to API calls. #define VK_QCOM_image_processing 1 #define VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION 1 #define VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME "VK_QCOM_image_processing" @@ -16071,6 +16344,7 @@ typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM { +// VK_EXT_external_memory_acquire_unmodified is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_external_memory_acquire_unmodified 1 #define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION 1 #define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXTENSION_NAME "VK_EXT_external_memory_acquire_unmodified" @@ -16082,6 +16356,7 @@ typedef struct VkExternalMemoryAcquireUnmodifiedEXT { +// VK_EXT_extended_dynamic_state3 is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_extended_dynamic_state3 1 #define VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION 2 #define VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME "VK_EXT_extended_dynamic_state3" @@ -16315,6 +16590,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageReductionModeNV( #endif +// VK_EXT_subpass_merge_feedback is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_subpass_merge_feedback 1 #define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2 #define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback" @@ -16372,6 +16648,7 @@ typedef struct VkRenderPassSubpassFeedbackCreateInfoEXT { +// VK_LUNARG_direct_driver_loading is a preprocessor guard. Do not pass it to API calls. #define VK_LUNARG_direct_driver_loading 1 #define VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION 1 #define VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME "VK_LUNARG_direct_driver_loading" @@ -16402,6 +16679,7 @@ typedef struct VkDirectDriverLoadingListLUNARG { +// VK_EXT_shader_module_identifier is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_module_identifier 1 #define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U #define VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION 1 @@ -16448,11 +16726,13 @@ VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT( #endif +// VK_EXT_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_rasterization_order_attachment_access 1 #define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 #define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access" +// VK_NV_optical_flow is a preprocessor guard. Do not pass it to API calls. #define VK_NV_optical_flow 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkOpticalFlowSessionNV) #define VK_NV_OPTICAL_FLOW_SPEC_VERSION 1 @@ -16617,6 +16897,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV( #endif +// VK_EXT_legacy_dithering is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_legacy_dithering 1 #define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 1 #define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering" @@ -16628,6 +16909,7 @@ typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT { +// VK_EXT_pipeline_protected_access is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_pipeline_protected_access 1 #define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1 #define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access" @@ -16639,6 +16921,7 @@ typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT { +// VK_EXT_shader_object is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_object 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderEXT) #define VK_EXT_SHADER_OBJECT_SPEC_VERSION 1 @@ -16725,6 +17008,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBindShadersEXT( #endif +// VK_QCOM_tile_properties is a preprocessor guard. Do not pass it to API calls. #define VK_QCOM_tile_properties 1 #define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1 #define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties" @@ -16759,6 +17043,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetDynamicRenderingTilePropertiesQCOM( #endif +// VK_SEC_amigo_profiling is a preprocessor guard. Do not pass it to API calls. #define VK_SEC_amigo_profiling 1 #define VK_SEC_AMIGO_PROFILING_SPEC_VERSION 1 #define VK_SEC_AMIGO_PROFILING_EXTENSION_NAME "VK_SEC_amigo_profiling" @@ -16777,6 +17062,7 @@ typedef struct VkAmigoProfilingSubmitInfoSEC { +// VK_QCOM_multiview_per_view_viewports is a preprocessor guard. Do not pass it to API calls. #define VK_QCOM_multiview_per_view_viewports 1 #define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION 1 #define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME "VK_QCOM_multiview_per_view_viewports" @@ -16788,6 +17074,7 @@ typedef struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM { +// VK_NV_ray_tracing_invocation_reorder is a preprocessor guard. Do not pass it to API calls. #define VK_NV_ray_tracing_invocation_reorder 1 #define VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION 1 #define VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME "VK_NV_ray_tracing_invocation_reorder" @@ -16811,11 +17098,13 @@ typedef struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV { +// VK_EXT_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_mutable_descriptor_type 1 #define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 #define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type" +// VK_ARM_shader_core_builtins is a preprocessor guard. Do not pass it to API calls. #define VK_ARM_shader_core_builtins 1 #define VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION 2 #define VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME "VK_ARM_shader_core_builtins" @@ -16835,6 +17124,7 @@ typedef struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM { +// VK_EXT_pipeline_library_group_handles is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_pipeline_library_group_handles 1 #define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION 1 #define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_EXTENSION_NAME "VK_EXT_pipeline_library_group_handles" @@ -16846,6 +17136,7 @@ typedef struct VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT { +// VK_EXT_dynamic_rendering_unused_attachments is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_dynamic_rendering_unused_attachments 1 #define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION 1 #define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME "VK_EXT_dynamic_rendering_unused_attachments" @@ -16857,6 +17148,7 @@ typedef struct VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT { +// VK_QCOM_multiview_per_view_render_areas is a preprocessor guard. Do not pass it to API calls. 
#define VK_QCOM_multiview_per_view_render_areas 1 #define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION 1 #define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME "VK_QCOM_multiview_per_view_render_areas" @@ -16875,6 +17167,7 @@ typedef struct VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM { +// VK_EXT_attachment_feedback_loop_dynamic_state is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_attachment_feedback_loop_dynamic_state 1 #define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION 1 #define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_dynamic_state" @@ -16893,6 +17186,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetAttachmentFeedbackLoopEnableEXT( #endif +// VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_acceleration_structure 1 #define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 #define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure" @@ -17168,6 +17462,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR( #endif +// VK_KHR_ray_tracing_pipeline is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_ray_tracing_pipeline 1 #define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1 #define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline" @@ -17304,6 +17599,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR( #endif +// VK_KHR_ray_query is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_ray_query 1 #define VK_KHR_RAY_QUERY_SPEC_VERSION 1 #define VK_KHR_RAY_QUERY_EXTENSION_NAME "VK_KHR_ray_query" @@ -17315,6 +17611,7 @@ typedef struct VkPhysicalDeviceRayQueryFeaturesKHR { +// VK_EXT_mesh_shader is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_mesh_shader 1 #define VK_EXT_MESH_SHADER_SPEC_VERSION 1 #define VK_EXT_MESH_SHADER_EXTENSION_NAME "VK_EXT_mesh_shader" diff --git a/include/vulkan/vulkan_directfb.h b/include/vulkan/vulkan_directfb.h index 81d22fa..1f11a08 100644 --- a/include/vulkan/vulkan_directfb.h +++ b/include/vulkan/vulkan_directfb.h @@ -19,6 +19,7 @@ extern "C" { +// VK_EXT_directfb_surface is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_directfb_surface 1 #define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1 #define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface" diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp index 9dc2a33..f5b118c 100644 --- a/include/vulkan/vulkan_enums.hpp +++ b/include/vulkan/vulkan_enums.hpp @@ -5628,31 +5628,6 @@ namespace VULKAN_HPP_NAMESPACE eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT }; - //=== VK_NV_cooperative_matrix === - - enum class ScopeNV - { - eDevice = VK_SCOPE_DEVICE_NV, - eWorkgroup = VK_SCOPE_WORKGROUP_NV, - eSubgroup = VK_SCOPE_SUBGROUP_NV, - eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV - }; - - enum class ComponentTypeNV - { - eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV, - eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV, - eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV, - eSint8 = VK_COMPONENT_TYPE_SINT8_NV, - eSint16 = VK_COMPONENT_TYPE_SINT16_NV, - eSint32 = VK_COMPONENT_TYPE_SINT32_NV, - eSint64 = VK_COMPONENT_TYPE_SINT64_NV, - eUint8 = VK_COMPONENT_TYPE_UINT8_NV, - eUint16 = VK_COMPONENT_TYPE_UINT16_NV, - eUint32 = VK_COMPONENT_TYPE_UINT32_NV, - eUint64 = VK_COMPONENT_TYPE_UINT64_NV - }; - //=== VK_NV_coverage_reduction_mode === enum class CoverageReductionModeNV @@ -6646,6 +6621,7 @@ namespace VULKAN_HPP_NAMESPACE eSubgroup = VK_SCOPE_SUBGROUP_KHR, eQueueFamily = VK_SCOPE_QUEUE_FAMILY_KHR }; + using ScopeNV = ScopeKHR; enum class ComponentTypeKHR { @@ -6661,6 +6637,7 @@ namespace VULKAN_HPP_NAMESPACE eUint32 = VK_COMPONENT_TYPE_UINT32_KHR, eUint64 = VK_COMPONENT_TYPE_UINT64_KHR }; + using ComponentTypeNV = ComponentTypeKHR; //========================= //=== Index Type Traits === diff --git a/include/vulkan/vulkan_fuchsia.h b/include/vulkan/vulkan_fuchsia.h index c11d9e6..76e1564 100644 --- a/include/vulkan/vulkan_fuchsia.h +++ b/include/vulkan/vulkan_fuchsia.h @@ -19,6 +19,7 @@ extern "C" { +// VK_FUCHSIA_imagepipe_surface is a preprocessor guard. Do not pass it to API calls. #define VK_FUCHSIA_imagepipe_surface 1 #define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1 #define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface" @@ -41,6 +42,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA( #endif +// VK_FUCHSIA_external_memory is a preprocessor guard. Do not pass it to API calls. #define VK_FUCHSIA_external_memory 1 #define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1 #define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory" @@ -81,6 +83,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA( #endif +// VK_FUCHSIA_external_semaphore is a preprocessor guard. Do not pass it to API calls. #define VK_FUCHSIA_external_semaphore 1 #define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 #define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore" @@ -115,6 +118,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA( #endif +// VK_FUCHSIA_buffer_collection is a preprocessor guard. Do not pass it to API calls. #define VK_FUCHSIA_buffer_collection 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA) #define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 2 diff --git a/include/vulkan/vulkan_ggp.h b/include/vulkan/vulkan_ggp.h index 2c0d5ed..9783aa3 100644 --- a/include/vulkan/vulkan_ggp.h +++ b/include/vulkan/vulkan_ggp.h @@ -19,6 +19,7 @@ extern "C" { +// VK_GGP_stream_descriptor_surface is a preprocessor guard. Do not pass it to API calls. 
#define VK_GGP_stream_descriptor_surface 1 #define VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION 1 #define VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME "VK_GGP_stream_descriptor_surface" @@ -41,6 +42,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateStreamDescriptorSurfaceGGP( #endif +// VK_GGP_frame_token is a preprocessor guard. Do not pass it to API calls. #define VK_GGP_frame_token 1 #define VK_GGP_FRAME_TOKEN_SPEC_VERSION 1 #define VK_GGP_FRAME_TOKEN_EXTENSION_NAME "VK_GGP_frame_token" diff --git a/include/vulkan/vulkan_ios.h b/include/vulkan/vulkan_ios.h index 8c6d9e7..211429f 100644 --- a/include/vulkan/vulkan_ios.h +++ b/include/vulkan/vulkan_ios.h @@ -19,6 +19,7 @@ extern "C" { +// VK_MVK_ios_surface is a preprocessor guard. Do not pass it to API calls. #define VK_MVK_ios_surface 1 #define VK_MVK_IOS_SURFACE_SPEC_VERSION 3 #define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" diff --git a/include/vulkan/vulkan_macos.h b/include/vulkan/vulkan_macos.h index 3310e11..c6509cc 100644 --- a/include/vulkan/vulkan_macos.h +++ b/include/vulkan/vulkan_macos.h @@ -19,6 +19,7 @@ extern "C" { +// VK_MVK_macos_surface is a preprocessor guard. Do not pass it to API calls. #define VK_MVK_macos_surface 1 #define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3 #define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" diff --git a/include/vulkan/vulkan_metal.h b/include/vulkan/vulkan_metal.h index 7127651..94563a0 100644 --- a/include/vulkan/vulkan_metal.h +++ b/include/vulkan/vulkan_metal.h @@ -19,6 +19,7 @@ extern "C" { +// VK_EXT_metal_surface is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_metal_surface 1 #ifdef __OBJC__ @class CAMetalLayer; @@ -47,6 +48,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT( #endif +// VK_EXT_metal_objects is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_metal_objects 1 #ifdef __OBJC__ @protocol MTLDevice; diff --git a/include/vulkan/vulkan_screen.h b/include/vulkan/vulkan_screen.h index d2ccf47..981738f 100644 --- a/include/vulkan/vulkan_screen.h +++ b/include/vulkan/vulkan_screen.h @@ -19,6 +19,7 @@ extern "C" { +// VK_QNX_screen_surface is a preprocessor guard. Do not pass it to API calls. #define VK_QNX_screen_surface 1 #define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1 #define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface" @@ -48,6 +49,7 @@ VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX( #endif +// VK_QNX_external_memory_screen_buffer is a preprocessor guard. Do not pass it to API calls. 
#define VK_QNX_external_memory_screen_buffer 1 #define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION 1 #define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME "VK_QNX_external_memory_screen_buffer" diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp index 162e2af..ab71413 100644 --- a/include/vulkan/vulkan_structs.hpp +++ b/include/vulkan/vulkan_structs.hpp @@ -15852,11 +15852,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, - VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, - VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice, + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = {}, + VULKAN_HPP_NAMESPACE::ScopeNV scope_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) , MSize( MSize_ ) @@ -15998,11 +15998,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t MSize = {}; uint32_t NSize = {}; uint32_t KSize = {}; - VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16; - VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice; + VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = {}; + VULKAN_HPP_NAMESPACE::ScopeNV scope = {}; }; template <> diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp index 359fa6d..a0a1431 100644 --- a/include/vulkan/vulkan_to_string.hpp +++ b/include/vulkan/vulkan_to_string.hpp @@ -7700,39 +7700,6 @@ namespace VULKAN_HPP_NAMESPACE } } - //=== VK_NV_cooperative_matrix === - - VULKAN_HPP_INLINE std::string to_string( ScopeNV value ) - { - switch ( value ) - { - case ScopeNV::eDevice: return "Device"; - case ScopeNV::eWorkgroup: return "Workgroup"; - case ScopeNV::eSubgroup: return "Subgroup"; - case ScopeNV::eQueueFamily: return "QueueFamily"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - - VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value ) - { - switch ( value ) - { - case ComponentTypeNV::eFloat16: return "Float16"; - case ComponentTypeNV::eFloat32: return "Float32"; - case ComponentTypeNV::eFloat64: return "Float64"; - case ComponentTypeNV::eSint8: return "Sint8"; - case ComponentTypeNV::eSint16: return "Sint16"; - case ComponentTypeNV::eSint32: return "Sint32"; - case ComponentTypeNV::eSint64: return "Sint64"; - case ComponentTypeNV::eUint8: return "Uint8"; - case ComponentTypeNV::eUint16: return "Uint16"; - case ComponentTypeNV::eUint32: return "Uint32"; - case ComponentTypeNV::eUint64: 
return "Uint64"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - //=== VK_NV_coverage_reduction_mode === VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value ) diff --git a/include/vulkan/vulkan_vi.h b/include/vulkan/vulkan_vi.h index 6726470..c9227e8 100644 --- a/include/vulkan/vulkan_vi.h +++ b/include/vulkan/vulkan_vi.h @@ -19,6 +19,7 @@ extern "C" { +// VK_NN_vi_surface is a preprocessor guard. Do not pass it to API calls. #define VK_NN_vi_surface 1 #define VK_NN_VI_SURFACE_SPEC_VERSION 1 #define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" diff --git a/include/vulkan/vulkan_wayland.h b/include/vulkan/vulkan_wayland.h index 42b2801..c93b217 100644 --- a/include/vulkan/vulkan_wayland.h +++ b/include/vulkan/vulkan_wayland.h @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_wayland_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_wayland_surface 1 #define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 #define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" diff --git a/include/vulkan/vulkan_win32.h b/include/vulkan/vulkan_win32.h index 5b65a36..fae3b85 100644 --- a/include/vulkan/vulkan_win32.h +++ b/include/vulkan/vulkan_win32.h @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_win32_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_win32_surface 1 #define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 #define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" @@ -47,6 +48,7 @@ VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( #endif +// VK_KHR_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_memory_win32 1 #define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" @@ -96,6 +98,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( #endif +// VK_KHR_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_win32_keyed_mutex 1 #define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 #define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" @@ -113,6 +116,7 @@ typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { +// VK_KHR_external_semaphore_win32 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_semaphore_win32 1 #define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" @@ -165,6 +169,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( #endif +// VK_KHR_external_fence_win32 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_external_fence_win32 1 #define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 #define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" @@ -208,6 +213,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( #endif +// VK_NV_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. #define VK_NV_external_memory_win32 1 #define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 #define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" @@ -236,6 +242,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( #endif +// VK_NV_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. 
#define VK_NV_win32_keyed_mutex 1 #define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 2 #define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" @@ -253,6 +260,7 @@ typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { +// VK_EXT_full_screen_exclusive is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_full_screen_exclusive 1 #define VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION 4 #define VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME "VK_EXT_full_screen_exclusive" @@ -309,6 +317,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModes2EXT( #endif +// VK_NV_acquire_winrt_display is a preprocessor guard. Do not pass it to API calls. #define VK_NV_acquire_winrt_display 1 #define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1 #define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display" diff --git a/include/vulkan/vulkan_xcb.h b/include/vulkan/vulkan_xcb.h index b49aa4e..de74055 100644 --- a/include/vulkan/vulkan_xcb.h +++ b/include/vulkan/vulkan_xcb.h @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_xcb_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_xcb_surface 1 #define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 #define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" diff --git a/include/vulkan/vulkan_xlib.h b/include/vulkan/vulkan_xlib.h index 186cdd5..1aa632f 100644 --- a/include/vulkan/vulkan_xlib.h +++ b/include/vulkan/vulkan_xlib.h @@ -19,6 +19,7 @@ extern "C" { +// VK_KHR_xlib_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_xlib_surface 1 #define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 #define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" diff --git a/include/vulkan/vulkan_xlib_xrandr.h b/include/vulkan/vulkan_xlib_xrandr.h index 9a3b629..e164ffc 100644 --- a/include/vulkan/vulkan_xlib_xrandr.h +++ b/include/vulkan/vulkan_xlib_xrandr.h @@ -19,6 +19,7 @@ extern "C" { +// VK_EXT_acquire_xlib_display is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_acquire_xlib_display 1 #define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1 #define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display" diff --git a/registry/cgenerator.py b/registry/cgenerator.py index 41a39f4..ef8d681 100644 --- a/registry/cgenerator.py +++ b/registry/cgenerator.py @@ -255,6 +255,8 @@ class COutputGenerator(OutputGenerator): write('#ifdef', self.featureExtraProtect, file=self.outFile) self.newline() + # Generate warning of possible use in IDEs + write(f'// {self.featureName} is a preprocessor guard. 
Do not pass it to API calls.', file=self.outFile) write('#define', self.featureName, '1', file=self.outFile) for section in self.TYPE_SECTIONS: contents = self.sections[section] diff --git a/registry/validusage.json b/registry/validusage.json index 5546e28..9e9106c 100644 --- a/registry/validusage.json +++ b/registry/validusage.json @@ -1,9 +1,9 @@ { "version info": { "schema version": 2, - "api version": "1.3.256", - "comment": "from git branch: github-main commit: 3dae5d7fbf332970ae0a97d5ab05ae5db93e62f0", - "date": "2023-06-30 12:46:10Z" + "api version": "1.3.257", + "comment": "from git branch: github-main commit: 2b7d9c9f65cde43580573a677490c3c7d099c09f", + "date": "2023-07-07 12:31:11Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -422,7 +422,7 @@ }, { "vuid": "VUID-VkQueueFamilyGlobalPriorityPropertiesKHR-priorities-parameter", - "text": " Any given element of priorities must be a valid VkQueueGlobalPriorityKHR value" + "text": " Each element of priorities must be a valid VkQueueGlobalPriorityKHR value" } ] }, @@ -6444,6 +6444,14 @@ "vuid": "VUID-VkRenderingInfo-multisampledRenderToSingleSampled-06857", "text": " If none of the VK_AMD_mixed_attachment_samples extension, the VK_NV_framebuffer_mixed_samples extension, or the multisampledRenderToSingleSampled feature are enabled, imageView members of pDepthAttachment, pStencilAttachment, and elements of pColorAttachments that are not VK_NULL_HANDLE must have been created with the same sampleCount" }, + { + "vuid": "VUID-VkRenderingInfo-None-08994", + "text": " If VkDeviceGroupRenderPassBeginInfo::deviceRenderAreaCount is 0, renderArea.extent.width must be greater than 0" + }, + { + "vuid": "VUID-VkRenderingInfo-None-08995", + "text": " If VkDeviceGroupRenderPassBeginInfo::deviceRenderAreaCount is 0, renderArea.extent.height must be greater than 0" + }, { "vuid": "VUID-VkRenderingInfo-imageView-06858", "text": " If multisampled-render-to-single-sampled is enabled, then all attachments referenced by imageView members of pDepthAttachment, pStencilAttachment, and elements of pColorAttachments that are not VK_NULL_HANDLE must have a sample count that is either VK_SAMPLE_COUNT_1_BIT or equal to VkMultisampledRenderToSingleSampledInfoEXT::rasterizationSamples" @@ -7786,11 +7794,11 @@ }, { "vuid": "VUID-VkRenderPassCreateInfo2-attachment-03050", - "text": " If the attachment member of any element of pInputAttachments, pColorAttachments, pResolveAttachments or pDepthStencilAttachment, or the attachment indexed by any element of pPreserveAttachments in any given element of pSubpasses is bound to a range of a VkDeviceMemory object that overlaps with any other attachment in any subpass (including the same subpass), the VkAttachmentDescription2 structures describing them must include VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT in flags" + "text": " If the attachment member of any element of pInputAttachments, pColorAttachments, pResolveAttachments or pDepthStencilAttachment, or the attachment indexed by any element of pPreserveAttachments in any element of pSubpasses is bound to a range of a VkDeviceMemory object that overlaps with any other attachment in any subpass (including the same subpass), the VkAttachmentDescription2 structures describing them must include VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT in flags" }, { "vuid": "VUID-VkRenderPassCreateInfo2-attachment-03051", - "text": " If the attachment member of any element of pInputAttachments, pColorAttachments, pResolveAttachments or pDepthStencilAttachment, or any element of 
pPreserveAttachments in any given element of pSubpasses is not VK_ATTACHMENT_UNUSED, then it must be less than attachmentCount" + "text": " If the attachment member of any element of pInputAttachments, pColorAttachments, pResolveAttachments or pDepthStencilAttachment, or any element of pPreserveAttachments in any element of pSubpasses is not VK_ATTACHMENT_UNUSED, then it must be less than attachmentCount" }, { "vuid": "VUID-VkRenderPassCreateInfo2-fragmentDensityMapAttachment-06472", @@ -8202,7 +8210,7 @@ }, { "vuid": "VUID-VkSubpassDescription2-pResolveAttachments-03068", - "text": " Any given element of pResolveAttachments must have the same VkFormat as its corresponding color attachment" + "text": " Each element of pResolveAttachments must have the same VkFormat as its corresponding color attachment" }, { "vuid": "VUID-VkSubpassDescription2-multisampledRenderToSingleSampled-06869", @@ -8258,7 +8266,7 @@ }, { "vuid": "VUID-VkSubpassDescription2-pPreserveAttachments-03074", - "text": " Any given element of pPreserveAttachments must not also be an element of any other member of the subpass description" + "text": " Each element of pPreserveAttachments must not also be an element of any other member of the subpass description" }, { "vuid": "VUID-VkSubpassDescription2-layout-02528", @@ -9268,6 +9276,14 @@ "vuid": "VUID-VkRenderPassBeginInfo-renderPass-00904", "text": " renderPass must be compatible with the renderPass member of the VkFramebufferCreateInfo structure specified when creating framebuffer" }, + { + "vuid": "VUID-VkRenderPassBeginInfo-None-08996", + "text": " If VkDeviceGroupRenderPassBeginInfo::deviceRenderAreaCount is 0, renderArea.extent.width must be greater than 0" + }, + { + "vuid": "VUID-VkRenderPassBeginInfo-None-08997", + "text": " If VkDeviceGroupRenderPassBeginInfo::deviceRenderAreaCount is 0, renderArea.extent.height must be greater than 0" + }, { "vuid": "VUID-VkRenderPassBeginInfo-pNext-02850", "text": " If the pNext chain does not contain VkDeviceGroupRenderPassBeginInfo or its deviceRenderAreaCount member is equal to 0, renderArea.offset.x must be greater than or equal to 0" @@ -9488,6 +9504,14 @@ "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-offset-06169", "text": " The sum of the offset.y and extent.height members of any element of pDeviceRenderAreas must be less than or equal to maxFramebufferHeight" }, + { + "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-extent-08998", + "text": " The extent.width member of any element of pDeviceRenderAreas must be greater than 0" + }, + { + "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-extent-08999", + "text": " The extent.height member of any element of pDeviceRenderAreas must be greater than 0" + }, { "vuid": "VUID-VkDeviceGroupRenderPassBeginInfo-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO" @@ -10590,54 +10614,6 @@ } ] }, - "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV": { - "core": [ - { - "vuid": "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-physicalDevice-parameter", - "text": " physicalDevice must be a valid VkPhysicalDevice handle" - }, - { - "vuid": "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pPropertyCount-parameter", - "text": " pPropertyCount must be a valid pointer to a uint32_t value" - }, - { - "vuid": "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pProperties-parameter", - "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount 
VkCooperativeMatrixPropertiesNV structures" - } - ] - }, - "VkCooperativeMatrixPropertiesNV": { - "core": [ - { - "vuid": "VUID-VkCooperativeMatrixPropertiesNV-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV" - }, - { - "vuid": "VUID-VkCooperativeMatrixPropertiesNV-pNext-pNext", - "text": " pNext must be NULL" - }, - { - "vuid": "VUID-VkCooperativeMatrixPropertiesNV-AType-parameter", - "text": " AType must be a valid VkComponentTypeNV value" - }, - { - "vuid": "VUID-VkCooperativeMatrixPropertiesNV-BType-parameter", - "text": " BType must be a valid VkComponentTypeNV value" - }, - { - "vuid": "VUID-VkCooperativeMatrixPropertiesNV-CType-parameter", - "text": " CType must be a valid VkComponentTypeNV value" - }, - { - "vuid": "VUID-VkCooperativeMatrixPropertiesNV-DType-parameter", - "text": " DType must be a valid VkComponentTypeNV value" - }, - { - "vuid": "VUID-VkCooperativeMatrixPropertiesNV-scope-parameter", - "text": " scope must be a valid VkScopeNV value" - } - ] - }, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR": { "core": [ { @@ -10654,6 +10630,22 @@ } ] }, + "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV": { + "core": [ + { + "vuid": "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-physicalDevice-parameter", + "text": " physicalDevice must be a valid VkPhysicalDevice handle" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pPropertyCount-parameter", + "text": " pPropertyCount must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetPhysicalDeviceCooperativeMatrixPropertiesNV-pProperties-parameter", + "text": " If the value referenced by pPropertyCount is not 0, and pProperties is not NULL, pProperties must be a valid pointer to an array of pPropertyCount VkCooperativeMatrixPropertiesNV structures" + } + ] + }, "VkCooperativeMatrixPropertiesKHR": { "core": [ { @@ -10686,6 +10678,38 @@ } ] }, + "VkCooperativeMatrixPropertiesNV": { + "core": [ + { + "vuid": "VUID-VkCooperativeMatrixPropertiesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV" + }, + { + "vuid": "VUID-VkCooperativeMatrixPropertiesNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkCooperativeMatrixPropertiesNV-AType-parameter", + "text": " AType must be a valid VkComponentTypeNV value" + }, + { + "vuid": "VUID-VkCooperativeMatrixPropertiesNV-BType-parameter", + "text": " BType must be a valid VkComponentTypeNV value" + }, + { + "vuid": "VUID-VkCooperativeMatrixPropertiesNV-CType-parameter", + "text": " CType must be a valid VkComponentTypeNV value" + }, + { + "vuid": "VUID-VkCooperativeMatrixPropertiesNV-DType-parameter", + "text": " DType must be a valid VkComponentTypeNV value" + }, + { + "vuid": "VUID-VkCooperativeMatrixPropertiesNV-scope-parameter", + "text": " scope must be a valid VkScopeNV value" + } + ] + }, "vkCreateValidationCacheEXT": { "core": [ { @@ -11094,7 +11118,7 @@ }, { "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-02756", - "text": " If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfo structure is included in the pNext chain and stage is VK_SHADER_STAGE_COMPUTE_BIT, the local workgroup size of the shader must be less than or equal to the product of VkPipelineShaderStageRequiredSubgroupSizeCreateInfo::requiredSubgroupSize and maxComputeWorkgroupSubgroups" + "text": " If a VkPipelineShaderStageRequiredSubgroupSizeCreateInfo structure is included in the pNext chain and stage is VK_SHADER_STAGE_COMPUTE_BIT, 
VK_SHADER_STAGE_MESH_BIT_EXT, or VK_SHADER_STAGE_TASK_BIT_EXT, the local workgroup size of the shader must be less than or equal to the product of VkPipelineShaderStageRequiredSubgroupSizeCreateInfo::requiredSubgroupSize and maxComputeWorkgroupSubgroups" }, { "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-02757", @@ -17438,11 +17462,11 @@ }, { "vuid": "VUID-VkImageViewCreateInfo-image-02724", - "text": " If image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange.baseArrayLayer must be less than the depth computed from baseMipLevel and extent.depth specified in VkImageCreateInfo when image was created, according to the formula defined in Image Miplevel Sizing" + "text": " If image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange.baseArrayLayer must be less than the depth computed from baseMipLevel and extent.depth specified in VkImageCreateInfo when image was created, according to the formula defined in Image Mip Level Sizing" }, { "vuid": "VUID-VkImageViewCreateInfo-subresourceRange-02725", - "text": " If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange.layerCount must be non-zero and subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the depth computed from baseMipLevel and extent.depth specified in VkImageCreateInfo when image was created, according to the formula defined in Image Miplevel Sizing" + "text": " If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, image is a 3D image created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT set, and viewType is VK_IMAGE_VIEW_TYPE_2D or VK_IMAGE_VIEW_TYPE_2D_ARRAY, subresourceRange.layerCount must be non-zero and subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the depth computed from baseMipLevel and extent.depth specified in VkImageCreateInfo when image was created, according to the formula defined in Image Mip Level Sizing" }, { "vuid": "VUID-VkImageViewCreateInfo-image-01761", @@ -17742,11 +17766,11 @@ "core": [ { "vuid": "VUID-VkImageViewSlicedCreateInfoEXT-sliceOffset-07867", - "text": " sliceOffset must be less than the effective view depth as specified in Image Miplevel Sizing" + "text": " sliceOffset must be less than the effective view depth as specified in Image Mip Level Sizing" }, { "vuid": "VUID-VkImageViewSlicedCreateInfoEXT-sliceCount-07868", - "text": " If sliceCount is not VK_REMAINING_3D_SLICES_EXT, it must be be non-zero and sliceOffset + sliceCount must be less than or equal to the effective view depth as specified in Image Miplevel Sizing" + "text": " If sliceCount is not VK_REMAINING_3D_SLICES_EXT, it must be be non-zero and sliceOffset + sliceCount must be less than or equal to the effective view depth as specified in Image Mip Level Sizing" }, { "vuid": "VUID-VkImageViewSlicedCreateInfoEXT-image-07869", @@ -21666,7 +21690,7 @@ }, { "vuid": "VUID-VkWriteDescriptorSet-descriptorType-06450", - "text": " If descriptorType is VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView member of each element of pImageInfo must have either been created without a VkImageViewMinLodCreateInfoEXT present in the pNext chain or with a 
VkImageViewMinLodCreateInfoEXT::minLod of 0.0" + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, the imageView member of each element of pImageInfo must have either been created without a VkImageViewMinLodCreateInfoEXT included in the pNext chain or with a VkImageViewMinLodCreateInfoEXT::minLod of 0.0" }, { "vuid": "VUID-VkWriteDescriptorSet-descriptorType-06942", @@ -25800,13 +25824,17 @@ "vuid": "VUID-vkGetQueryPoolResults-flags-00815", "text": " If VK_QUERY_RESULT_64_BIT is set in flags then pData and stride must be multiples of 8" }, + { + "vuid": "VUID-vkGetQueryPoolResults-stride-08993", + "text": " If VK_QUERY_RESULT_WITH_AVAILABILITY_BIT is set, stride must be large enough to contain the unsigned integer representing availability or status in addition to the query result." + }, { "vuid": "VUID-vkGetQueryPoolResults-queryType-03229", "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, then pData and stride must be multiples of the size of VkPerformanceCounterResultKHR" }, { "vuid": "VUID-vkGetQueryPoolResults-queryType-04519", - "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, then stride must be large enough to contain VkQueryPoolPerformanceCreateInfoKHR::counterIndexCount used to create queryPool times the size of VkPerformanceCounterResultKHR" + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, then stride must be large enough to contain the VkQueryPoolPerformanceCreateInfoKHR::counterIndexCount used to create queryPool times the size of VkPerformanceCounterResultKHR" }, { "vuid": "VUID-vkGetQueryPoolResults-firstQuery-00816", @@ -30761,12 +30789,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDraw-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDraw-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDraw-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDraw-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDraw-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDraw-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDraw-None-06886", @@ -30802,11 +30842,11 @@ }, { "vuid": "VUID-vkCmdDraw-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDraw-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDraw-None-07835", @@ -32053,12 +32093,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawIndexed-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawIndexed-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawIndexed-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawIndexed-None-06886", @@ -32094,11 +32146,11 @@ }, { "vuid": "VUID-vkCmdDrawIndexed-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndexed-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndexed-None-07835", @@ -33353,12 +33405,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawMultiEXT-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawMultiEXT-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMultiEXT-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMultiEXT-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMultiEXT-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawMultiEXT-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawMultiEXT-None-06886", @@ -33394,11 +33458,11 @@ }, { "vuid": "VUID-vkCmdDrawMultiEXT-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMultiEXT-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMultiEXT-None-07835", @@ -34661,12 +34725,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-06886", @@ -34702,11 +34778,11 @@ }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-07835", @@ -35981,12 +36057,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawIndirect-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawIndirect-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawIndirect-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawIndirect-None-06886", @@ -36022,11 +36110,11 @@ }, { "vuid": "VUID-vkCmdDrawIndirect-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndirect-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndirect-None-07835", @@ -37317,12 +37405,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawIndirectCount-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawIndirectCount-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-06886", @@ -37358,11 +37458,11 @@ }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-07835", @@ -38665,12 +38765,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawIndexedIndirect-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-06886", @@ -38706,11 +38818,11 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-07835", @@ -40009,12 +40121,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-06886", @@ -40050,11 +40174,11 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-07835", @@ -41365,12 +41489,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-06886", @@ -41406,11 +41542,11 @@ }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-07835", @@ -42777,12 +42913,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawMeshTasksNV-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
    • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline, or
    • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
      • there is no currently bound graphics pipeline, or
      • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT,
    it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-06886", @@ -42818,11 +42966,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-None-07835", @@ -44001,12 +44149,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-06886", @@ -44042,11 +44202,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-07835", @@ -45273,12 +45433,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-06886", @@ -45314,11 +45486,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-07835", @@ -46561,12 +46733,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-06886", @@ -46602,11 +46786,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-07835", @@ -47813,12 +47997,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-06886", @@ -47854,11 +48050,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-07835", @@ -49113,12 +49309,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-06886", @@ -49154,11 +49362,11 @@ }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-07835", @@ -50401,12 +50609,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-06886", @@ -50442,11 +50662,11 @@ }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-07835", @@ -51637,12 +51857,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-06886", @@ -51678,11 +51910,11 @@ }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-07835", @@ -54274,7 +54506,7 @@ }, { "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-combinerOps-parameter", - "text": " Any given element of combinerOps must be a valid VkFragmentShadingRateCombinerOpKHR value" + "text": " Each element of combinerOps must be a valid VkFragmentShadingRateCombinerOpKHR value" }, { "vuid": "VUID-vkCmdSetFragmentShadingRateKHR-commandBuffer-recording", @@ -54342,7 +54574,7 @@ }, { "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-combinerOps-parameter", - "text": " Any given element of combinerOps must be a valid VkFragmentShadingRateCombinerOpKHR value" + "text": " Each element of combinerOps must be a valid VkFragmentShadingRateCombinerOpKHR value" }, { "vuid": "VUID-vkCmdSetFragmentShadingRateEnumNV-commandBuffer-recording", @@ -58229,12 +58461,24 @@ "text": " Memory backing image subresources used as attachments in the current render pass must not be written in any way other than as an attachment by this command" }, { - "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-08753", - "text": " If any recorded command in the current subpass writes to an image subresource as an attachment, this command must not read from the memory backing that image subresource in any other way than as an attachment or as a sampled or storage image per the rules defined for feedback loops" + "vuid": 
"VUID-vkCmdExecuteGeneratedCommandsNV-None-09000", + "text": " If a color attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_COLOR_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-09001", + "text": " If a depth attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_DEPTH_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-09002", + "text": " If a stencil attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it is not in the VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT image layout, and either:
  • the VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT is set on the currently bound pipeline or
  • the last call to vkCmdSetAttachmentFeedbackLoopEnableEXT included VK_IMAGE_ASPECT_STENCIL_BIT and
    • there is no currently bound graphics pipeline or
    • the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT
  it must not be accessed in any way other than as an attachment by this command
" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-09003", + "text": " If an attachment is written by any prior command in this subpass or by the load, store, or resolve operations for this subpass, it must not be accessed in any way other than as an attachment, storage image, or sampled image by this command" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-06539", - "text": " If any recorded command in the current subpass will read from an image subresource used as an attachment in any way other than as an attachment, this command must not write to that image subresource as an attachment" + "text": " If any previously recorded command in the current subpass accessed an image subresource used as an attachment in this subpass in any way other than as an attachment, this command must not write to that image subresource as an attachment" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-06886", @@ -58270,11 +58514,11 @@ }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-07834", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_DEPTH_BIAS dynamic state enabled then vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-08620", - "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias must have been called in the current command buffer prior to this drawing command" + "text": " If a shader object is bound to any graphics stage, and the most recent call to vkCmdSetDepthBiasEnable in the current command buffer set depthBiasEnable to VK_TRUE, vkCmdSetDepthBias or vkCmdSetDepthBias2EXT must have been called in the current command buffer prior to this drawing command" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-07835", @@ -73710,15 +73954,15 @@ }, { "vuid": "VUID-StandaloneSpirv-Location-04917", - "text": " If a user-defined variables points to a Block decorated OpTypeStruct, then the OpVariable must have a Location decoration" + "text": " If a user-defined variable is a pointer to a Block decorated OpTypeStruct, then the OpVariable must have a Location decoration" }, { "vuid": "VUID-StandaloneSpirv-Location-04918", - "text": " If a user-defined variable has a Location decoration, and the variable points to a OpTypeStruct, then the members of that structure must not have Location decorations" + "text": " If a user-defined variable has a Location decoration, and the variable is a pointer to a OpTypeStruct, then the members of that structure must not have Location decorations" }, { "vuid": "VUID-StandaloneSpirv-Location-04919", - "text": " If a user-defined variable does not have a Location decoration, and the variable points to a Block decorated OpTypeStruct, then each member of the struct must have a Location decoration" + "text": " If a user-defined variable does not have a Location decoration, and the variable is a pointer to a Block decorated OpTypeStruct, then each member of the struct must have a Location decoration" }, { "vuid": "VUID-StandaloneSpirv-Component-04920", @@ -74042,7 +74286,7 @@ }, { "vuid": 
"VUID-StandaloneSpirv-Pointer-08973", - "text": " The {StorageClass} of the Pointer operand to OpCooperativeMatrixLoadKHR or OpCooperativeMatrixStoreKHR must be limited to Workgroup, CrossWorkgroup, StorageBuffer, or PhysicalStorageBuffer." + "text": " The {StorageClass} of the Pointer operand to OpCooperativeMatrixLoadKHR or OpCooperativeMatrixStoreKHR must be limited to Workgroup, StorageBuffer, or PhysicalStorageBuffer." } ] }, diff --git a/registry/vk.xml b/registry/vk.xml index de37660..9cafbd8 100644 --- a/registry/vk.xml +++ b/registry/vk.xml @@ -175,7 +175,7 @@ branch of the member gitlab server. #define VKSC_API_VERSION_1_0 VK_MAKE_API_VERSION(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0
// Version of this file -#define VK_HEADER_VERSION 256 +#define VK_HEADER_VERSION 257
// Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) // Version of this file @@ -711,8 +711,6 @@ typedef void* MTLSharedEvent_id; - - @@ -781,6 +779,8 @@ typedef void* MTLSharedEvent_id; + + WSI extensions @@ -9688,25 +9688,6 @@ typedef void* MTLSharedEvent_id; - - - - - - - - - - - - - - - - - - - @@ -19293,7 +19274,22 @@ typedef void* MTLSharedEvent_id; + + + + + + + + + + + + + + + @@ -24204,6 +24200,9 @@ typedef void* MTLSharedEvent_id; + + + From 2565ffa31ea67650f95f65347ed8f5917c651fac Mon Sep 17 00:00:00 2001 From: Juan Ramos Date: Mon, 10 Jul 2023 11:03:52 -0600 Subject: [PATCH 09/17] cmake: Use share instead of DATADIR for CMake config files See the following discussion for context: https://discourse.cmake.org/t/what-should-the-destination-be-for-a-header-only-librarys-cmake-config-file/8473/3 TLDR: `DATADIR` can be `share` but it doesn't have to be. It can be changed by the user. --- CMakeLists.txt | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 28b8caf..b3db540 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -68,12 +68,10 @@ if (PROJECT_IS_TOP_LEVEL) set_target_properties(Vulkan-Headers PROPERTIES EXPORT_NAME "Headers") - set(cmake_install_dir "${CMAKE_INSTALL_DATADIR}/cmake/VulkanHeaders") - install(TARGETS Vulkan-Headers EXPORT VulkanHeadersConfig INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) - install(EXPORT VulkanHeadersConfig NAMESPACE "Vulkan::" DESTINATION "${cmake_install_dir}") + install(EXPORT VulkanHeadersConfig NAMESPACE "Vulkan::" DESTINATION "share/cmake/VulkanHeaders") set(version_config "${CMAKE_CURRENT_BINARY_DIR}/generated/VulkanHeadersConfigVersion.cmake") write_basic_package_version_file("${version_config}" COMPATIBILITY SameMajorVersion ARCH_INDEPENDENT) - install(FILES "${version_config}" DESTINATION "${cmake_install_dir}") + install(FILES "${version_config}" DESTINATION "share/cmake/VulkanHeaders") endif() From bc14fdad60c51235e23ee569834a5baecae9231a Mon Sep 17 00:00:00 2001 From: Juan Ramos Date: Wed, 12 Jul 2023 12:52:35 -0600 Subject: [PATCH 10/17] Add REUSE compliance checker to CI --- .gitattributes | 8 ++++++++ .github/workflows/linux.yml | 21 +++++++++------------ .gitignore | 8 ++++++++ .reuse/dep5 | 19 +++++++++++++++++++ BUILD.gn | 16 +++------------- CMakeLists.txt | 17 ++++------------- LICENSE.md | 18 ++++++++++++++++++ LICENSE.txt => LICENSES/Apache-2.0.txt | 0 LICENSES/MIT.txt | 9 +++++++++ include/vulkan/vk_icd.h | 22 ++++------------------ include/vulkan/vk_layer.h | 22 ++++------------------ registry/spec_tools/util.py | 2 +- tests/CMakeLists.txt | 16 ++++------------ tests/add_subdirectory/CMakeLists.txt | 7 +++++++ tests/find_package/CMakeLists.txt | 7 +++++++ tests/vk_icd.c | 8 ++++++++ tests/vk_layer.c | 8 ++++++++ 17 files changed, 121 insertions(+), 87 deletions(-) create mode 100644 .reuse/dep5 create mode 100644 LICENSE.md rename LICENSE.txt => LICENSES/Apache-2.0.txt (100%) create mode 100644 LICENSES/MIT.txt diff --git a/.gitattributes b/.gitattributes index ca85b46..41eae37 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,11 @@ +# ~~~ +# Copyright 2018-2023 The Khronos Group Inc. +# Copyright 2018-2023 Valve Corporation +# Copyright 2018-2023 LunarG, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# ~~~ + # See https://git-scm.com/docs/gitattributes # See https://help.github.com/articles/dealing-with-line-endings/ diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml index a15e439..38fc0b7 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/linux.yml @@ -1,16 +1,6 @@ -# Copyright (c) 2022-2023 LunarG, Inc. - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# Copyright 2022-2023 LunarG, Inc. # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 name: Linux @@ -74,3 +64,10 @@ jobs: run: cmake -S tests/add_subdirectory -B tests/add_subdirectory/build - name: Build run: cmake --build tests/add_subdirectory/build + + reuse: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: REUSE Compliance Check + uses: fsfe/reuse-action@v1 diff --git a/.gitignore b/.gitignore index 61df001..7f6900b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,11 @@ +# ~~~ +# Copyright 2018-2023 The Khronos Group Inc. +# Copyright 2018-2023 Valve Corporation +# Copyright 2018-2023 LunarG, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# ~~~ + # Python cache __pycache__ *.pyc diff --git a/.reuse/dep5 b/.reuse/dep5 new file mode 100644 index 0000000..c17fc8d --- /dev/null +++ b/.reuse/dep5 @@ -0,0 +1,19 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: Vulkan-Headers +Source: https://github.com/KhronosGroup/Vulkan-Headers + +Files: registry/profiles/VP_KHR_roadmap_2022.json +Copyright: 2022-2023 The Khronos Group Inc. +License: Apache-2.0 + +Files: registry/validusage.json +Copyright: 2018-2023 The Khronos Group Inc. +License: Apache-2.0 + +Files: .github/ISSUE_TEMPLATE/bug_report.md .github/pull_request_template.md +Copyright: 2022-2023 The Khronos Group Inc. +License: Apache-2.0 + +Files: BUILD.md CODE_OF_CONDUCT.md CONTRIBUTING.md INTEGRATION.md README.md +Copyright: 2018-2023 The Khronos Group Inc. +License: Apache-2.0 diff --git a/BUILD.gn b/BUILD.gn index 2203257..8a05777 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -1,17 +1,7 @@ -# Copyright (C) 2018-2023 The ANGLE Project Authors. -# Copyright (C) 2019-2023 LunarG, Inc. +# Copyright 2018-2023 The ANGLE Project Authors. +# Copyright 2019-2023 LunarG, Inc. # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
+# SPDX-License-Identifier: Apache-2.0 import("//build_overrides/vulkan_headers.gni") diff --git a/CMakeLists.txt b/CMakeLists.txt index b3db540..5d9317f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,18 +1,9 @@ # ~~~ -# Copyright (c) 2018-2023 Valve Corporation -# Copyright (c) 2018-2023 LunarG, Inc. +# Copyright 2018-2023 The Khronos Group Inc. +# Copyright 2018-2023 Valve Corporation +# Copyright 2018-2023 LunarG, Inc. # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 # ~~~ cmake_minimum_required(VERSION 3.15...3.25) diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..d6a0648 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,18 @@ +Copyright 2015-2023 The Khronos Group Inc. + +Files in this repository fall under one of these licenses: + +- `Apache-2.0` +- `MIT` + +Note: With the exception of `parse_dependency.py` the files using `MIT` license +also fall under `Apache-2.0`. Example: + +``` +SPDX-License-Identifier: Apache-2.0 OR MIT +``` + +Full license text of these licenses is available at: + + * Apache-2.0: https://opensource.org/licenses/Apache-2.0 + * MIT: https://opensource.org/licenses/MIT diff --git a/LICENSE.txt b/LICENSES/Apache-2.0.txt similarity index 100% rename from LICENSE.txt rename to LICENSES/Apache-2.0.txt diff --git a/LICENSES/MIT.txt b/LICENSES/MIT.txt new file mode 100644 index 0000000..8495fe9 --- /dev/null +++ b/LICENSES/MIT.txt @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2015-2023 The Khronos Group Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/include/vulkan/vk_icd.h b/include/vulkan/vk_icd.h index 2cd6c3d..59204a3 100644 --- a/include/vulkan/vk_icd.h +++ b/include/vulkan/vk_icd.h @@ -1,23 +1,9 @@ -// -// File: vk_icd.h -// /* - * Copyright (c) 2015-2023 LunarG, Inc. - * Copyright (c) 2015-2023 The Khronos Group Inc. - * Copyright (c) 2015-2023 Valve Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Copyright 2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. * + * SPDX-License-Identifier: Apache-2.0 */ #pragma once diff --git a/include/vulkan/vk_layer.h b/include/vulkan/vk_layer.h index 7954f71..19d88fc 100644 --- a/include/vulkan/vk_layer.h +++ b/include/vulkan/vk_layer.h @@ -1,23 +1,9 @@ -// -// File: vk_layer.h -// /* - * Copyright (c) 2015-2023 LunarG, Inc. - * Copyright (c) 2015-2023 The Khronos Group Inc. - * Copyright (c) 2015-2023 Valve Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Copyright 2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. * + * SPDX-License-Identifier: Apache-2.0 */ #pragma once diff --git a/registry/spec_tools/util.py b/registry/spec_tools/util.py index bf25845..a607dd2 100644 --- a/registry/spec_tools/util.py +++ b/registry/spec_tools/util.py @@ -1,5 +1,5 @@ """Utility functions not closely tied to other spec_tools types.""" -# Copyright (c) 2018-2019 Collabora, Ltd. +# Copyright 2018-2019 Collabora, Ltd. # Copyright 2013-2023 The Khronos Group Inc. # # SPDX-License-Identifier: Apache-2.0 diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index e24f3d2..85afe76 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -1,17 +1,9 @@ # ~~~ -# Copyright (c) 2022-2023 LunarG, Inc. +# Copyright 2022-2023 The Khronos Group Inc. +# Copyright 2022-2023 Valve Corporation +# Copyright 2022-2023 LunarG, Inc. # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# SPDX-License-Identifier: Apache-2.0 # ~~~ # Test the non-API headers provided by this repo diff --git a/tests/add_subdirectory/CMakeLists.txt b/tests/add_subdirectory/CMakeLists.txt index cb8fdcc..286c36e 100644 --- a/tests/add_subdirectory/CMakeLists.txt +++ b/tests/add_subdirectory/CMakeLists.txt @@ -1,3 +1,10 @@ +# ~~~ +# Copyright 2022-2023 The Khronos Group Inc. +# Copyright 2022-2023 Valve Corporation +# Copyright 2022-2023 LunarG, Inc. 
+# +# SPDX-License-Identifier: Apache-2.0 +# ~~~ cmake_minimum_required(VERSION 3.14.2) project(TEST_VULKAN_HEADERS_ADD_SUBDIRECTORY_SUPPORT LANGUAGES C) diff --git a/tests/find_package/CMakeLists.txt b/tests/find_package/CMakeLists.txt index f45fc6c..c1a9900 100644 --- a/tests/find_package/CMakeLists.txt +++ b/tests/find_package/CMakeLists.txt @@ -1,3 +1,10 @@ +# ~~~ +# Copyright 2022-2023 The Khronos Group Inc. +# Copyright 2022-2023 Valve Corporation +# Copyright 2022-2023 LunarG, Inc. +# +# SPDX-License-Identifier: Apache-2.0 +# ~~~ cmake_minimum_required(VERSION 3.14.2) project(TEST_VULKAN_HEADERS_FIND_PACKAGE_SUPPORT LANGUAGES C) diff --git a/tests/vk_icd.c b/tests/vk_icd.c index 23766e5..de29d36 100644 --- a/tests/vk_icd.c +++ b/tests/vk_icd.c @@ -1,3 +1,11 @@ +/* + * Copyright 2022-2023 The Khronos Group Inc. + * Copyright 2022-2023 Valve Corporation + * Copyright 2022-2023 LunarG, Inc. + * + * SPDX-License-Identifier: Apache-2.0 + */ + #include "vulkan/vk_icd.h" int main() diff --git a/tests/vk_layer.c b/tests/vk_layer.c index df2bc09..490beb3 100644 --- a/tests/vk_layer.c +++ b/tests/vk_layer.c @@ -1,3 +1,11 @@ +/* + * Copyright 2022-2023 The Khronos Group Inc. + * Copyright 2022-2023 Valve Corporation + * Copyright 2022-2023 LunarG, Inc. + * + * SPDX-License-Identifier: Apache-2.0 + */ + #include "vulkan/vk_layer.h" int foobar() From 6eee20744f23424ef6088167aae1b52dfbcc1385 Mon Sep 17 00:00:00 2001 From: Juan Ramos Date: Mon, 17 Jul 2023 12:38:05 -0600 Subject: [PATCH 11/17] ci: Add Windows to CI --- .github/workflows/{linux.yml => ci.yml} | 18 +++++++++++++++++- tests/CMakeLists.txt | 8 ++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) rename .github/workflows/{linux.yml => ci.yml} (80%) diff --git a/.github/workflows/linux.yml b/.github/workflows/ci.yml similarity index 80% rename from .github/workflows/linux.yml rename to .github/workflows/ci.yml index 38fc0b7..82bf6af 100644 --- a/.github/workflows/linux.yml +++ b/.github/workflows/ci.yml @@ -2,7 +2,7 @@ # # SPDX-License-Identifier: Apache-2.0 -name: Linux +name: ci on: push: @@ -43,6 +43,22 @@ jobs: - name: Build Vulkan-Headers Tests run: cmake --build build + windows-cmake-tests: + runs-on: windows-latest + strategy: + matrix: + arch: [ amd64, amd64_x86 ] + steps: + - uses: actions/checkout@v3 + - uses: lukka/get-cmake@latest + - uses: ilammy/msvc-dev-cmd@v1 + with: + arch: ${{ matrix.arch }} + - name: Configure Vulkan-Headers + run: cmake -S . 
-B build -D BUILD_TESTS=ON --log-level=DEBUG + - name: Build Vulkan-Headers Tests + run: cmake --build build + test-cmake-minimum: runs-on: ubuntu-latest steps: diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 85afe76..6b3181e 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -25,6 +25,14 @@ if(${CMAKE_C_COMPILER_ID} MATCHES "(GNU|Clang)") ) endif() +if (MSVC) + add_compile_options( + /W4 + /permissive- + /WX + ) +endif() + # vk_icd.h add_executable(vk_icd vk_icd.c) target_link_libraries(vk_icd PRIVATE Vulkan::Headers) From 9c37439a7952c204150863fc35569dd864dbd599 Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Fri, 21 Jul 2023 03:40:13 -0700 Subject: [PATCH 12/17] Update for Vulkan-Docs 1.3.258 --- include/vk_video/vulkan_video_codec_h264std.h | 2 +- .../vulkan_video_codec_h264std_decode.h | 3 +- .../vulkan_video_codec_h264std_encode.h | 1 + include/vk_video/vulkan_video_codec_h265std.h | 11 +- .../vulkan_video_codec_h265std_decode.h | 3 +- .../vulkan_video_codec_h265std_encode.h | 1 + include/vk_video/vulkan_video_codecs_common.h | 4 + include/vulkan/vulkan.cppm | 32 +- include/vulkan/vulkan.hpp | 243 +- include/vulkan/vulkan_beta.h | 2 +- include/vulkan/vulkan_core.h | 240 +- include/vulkan/vulkan_enums.hpp | 43 +- .../vulkan/vulkan_extension_inspection.hpp | 9 +- include/vulkan/vulkan_funcs.hpp | 262 +- include/vulkan/vulkan_handles.hpp | 136 +- include/vulkan/vulkan_hash.hpp | 225 ++ include/vulkan/vulkan_raii.hpp | 220 +- include/vulkan/vulkan_static_assertions.hpp | 115 +- include/vulkan/vulkan_structs.hpp | 1853 +++++++++++ include/vulkan/vulkan_to_string.hpp | 49 + include/vulkan/vulkan_video.hpp | 2696 +++++++++++++++++ registry/generator.py | 9 + registry/genvk.py | 13 +- registry/reg.py | 18 +- registry/spec_tools/util.py | 2 +- registry/validusage.json | 1298 ++++++-- registry/video.xml | 18 +- registry/vk.xml | 238 +- 28 files changed, 7208 insertions(+), 538 deletions(-) create mode 100644 include/vulkan/vulkan_video.hpp diff --git a/include/vk_video/vulkan_video_codec_h264std.h b/include/vk_video/vulkan_video_codec_h264std.h index 959961c..a23789c 100644 --- a/include/vk_video/vulkan_video_codec_h264std.h +++ b/include/vk_video/vulkan_video_codec_h264std.h @@ -21,7 +21,7 @@ extern "C" { // vulkan_video_codec_h264std is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codec_h264std 1 -#include +#include "vulkan_video_codecs_common.h" #define STD_VIDEO_H264_CPB_CNT_LIST_SIZE 32 #define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS 6 #define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS 16 diff --git a/include/vk_video/vulkan_video_codec_h264std_decode.h b/include/vk_video/vulkan_video_codec_h264std_decode.h index 84ed632..dd24112 100644 --- a/include/vk_video/vulkan_video_codec_h264std_decode.h +++ b/include/vk_video/vulkan_video_codec_h264std_decode.h @@ -21,12 +21,13 @@ extern "C" { // vulkan_video_codec_h264std_decode is a preprocessor guard. Do not pass it to API calls. 
#define vulkan_video_codec_h264std_decode 1 +#include "vulkan_video_codec_h264std.h" #define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) -#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2 #define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 #define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode" +#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2 typedef enum StdVideoDecodeH264FieldOrderCount { STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP = 0, diff --git a/include/vk_video/vulkan_video_codec_h264std_encode.h b/include/vk_video/vulkan_video_codec_h264std_encode.h index 5c54a67..2e0d706 100644 --- a/include/vk_video/vulkan_video_codec_h264std_encode.h +++ b/include/vk_video/vulkan_video_codec_h264std_encode.h @@ -21,6 +21,7 @@ extern "C" { // vulkan_video_codec_h264std_encode is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codec_h264std_encode 1 +#include "vulkan_video_codec_h264std.h" // Vulkan 0.9 provisional Vulkan video H.264 encode std specification version number #define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_10 VK_MAKE_VIDEO_STD_VERSION(0, 9, 10) diff --git a/include/vk_video/vulkan_video_codec_h265std.h b/include/vk_video/vulkan_video_codec_h265std.h index 1221ba5..793bdbc 100644 --- a/include/vk_video/vulkan_video_codec_h265std.h +++ b/include/vk_video/vulkan_video_codec_h265std.h @@ -21,8 +21,9 @@ extern "C" { // vulkan_video_codec_h265std is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codec_h265std 1 -#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7 +#include "vulkan_video_codecs_common.h" #define STD_VIDEO_H265_CPB_CNT_LIST_SIZE 32 +#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7 #define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS 6 #define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS 16 #define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS 6 @@ -31,16 +32,16 @@ extern "C" { #define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS 64 #define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS 2 #define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS 64 -#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3 -#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128 -#define STD_VIDEO_H265_MAX_DPB_SIZE 16 -#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32 #define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6 #define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19 #define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128 #define STD_VIDEO_H265_MAX_NUM_LIST_REF 15 #define STD_VIDEO_H265_MAX_CHROMA_PLANES 2 #define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64 +#define STD_VIDEO_H265_MAX_DPB_SIZE 16 +#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32 #define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16 #define STD_VIDEO_H265_MAX_DELTA_POC 48 diff --git a/include/vk_video/vulkan_video_codec_h265std_decode.h b/include/vk_video/vulkan_video_codec_h265std_decode.h index a46a56b..75cf4d0 100644 --- a/include/vk_video/vulkan_video_codec_h265std_decode.h +++ b/include/vk_video/vulkan_video_codec_h265std_decode.h @@ -21,12 +21,13 @@ extern "C" { // vulkan_video_codec_h265std_decode is a preprocessor guard. Do not pass it to API calls. 
#define vulkan_video_codec_h265std_decode 1 +#include "vulkan_video_codec_h265std.h" #define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) -#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8 #define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 #define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode" +#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8 typedef struct StdVideoDecodeH265PictureInfoFlags { uint32_t IrapPicFlag : 1; uint32_t IdrPicFlag : 1; diff --git a/include/vk_video/vulkan_video_codec_h265std_encode.h b/include/vk_video/vulkan_video_codec_h265std_encode.h index 2b4f612..2574039 100644 --- a/include/vk_video/vulkan_video_codec_h265std_encode.h +++ b/include/vk_video/vulkan_video_codec_h265std_encode.h @@ -21,6 +21,7 @@ extern "C" { // vulkan_video_codec_h265std_encode is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codec_h265std_encode 1 +#include "vulkan_video_codec_h265std.h" // Vulkan 0.9 provisional Vulkan video H.265 encode std specification version number #define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_11 VK_MAKE_VIDEO_STD_VERSION(0, 9, 11) diff --git a/include/vk_video/vulkan_video_codecs_common.h b/include/vk_video/vulkan_video_codecs_common.h index d0e86d1..6568975 100644 --- a/include/vk_video/vulkan_video_codecs_common.h +++ b/include/vk_video/vulkan_video_codecs_common.h @@ -21,6 +21,10 @@ extern "C" { // vulkan_video_codecs_common is a preprocessor guard. Do not pass it to API calls. #define vulkan_video_codecs_common 1 +#if !defined(VK_NO_STDINT_H) + #include +#endif + #define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \ ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) diff --git a/include/vulkan/vulkan.cppm b/include/vulkan/vulkan.cppm index 3997211..e5bd813 100644 --- a/include/vulkan/vulkan.cppm +++ b/include/vulkan/vulkan.cppm @@ -640,6 +640,10 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_pipeline_executable_properties === using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR; + //=== VK_EXT_host_image_copy === + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT; + //=== VK_KHR_map_memory2 === using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBitsKHR; using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR; @@ -2060,6 +2064,20 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PipelineInfoEXT; using VULKAN_HPP_NAMESPACE::PipelineInfoKHR; + //=== VK_EXT_host_image_copy === + using VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT; + using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT; + using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT; + using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT; + using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT; + using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT; + //=== VK_KHR_map_memory2 === using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR; using VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR; @@ -2247,9 +2265,7 @@ export namespace 
VULKAN_HPP_NAMESPACE //=== VK_EXT_image_compression_control === using VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT; using VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT; - using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT; - using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT; //=== VK_EXT_attachment_feedback_loop_layout === using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; @@ -2442,6 +2458,12 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV; using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV; + //=== VK_NV_device_generated_commands_compute === + using VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV; + using VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + using VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV; + //=== VK_NV_linear_color_attachment === using VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV; @@ -2762,9 +2784,9 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::StructExtends; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_DYNAMIC_LOADER_TOOL ) +#if defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL ) using VULKAN_HPP_NAMESPACE::DynamicLoader; -#endif /*VULKAN_HPP_DYNAMIC_LOADER_TOOL*/ +#endif /*VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL*/ using VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic; @@ -2807,7 +2829,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::isObsoletedExtension; using VULKAN_HPP_NAMESPACE::isPromotedExtension; - export namespace VULKAN_HPP_RAII_NAMESPACE + namespace VULKAN_HPP_RAII_NAMESPACE { //====================== //=== RAII HARDCODED === diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp index 603be0f..f2e6e75 100644 --- a/include/vulkan/vulkan.hpp +++ b/include/vulkan/vulkan.hpp @@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 257, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 258, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. 
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION @@ -212,6 +212,14 @@ constexpr int False = 0; # define VULKAN_HPP_CONST_OR_CONSTEXPR const #endif +#if !defined( VULKAN_HPP_CONSTEXPR_INLINE ) +# if 201606L <= __cpp_inline_variables +# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR inline +# else +# define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR +# endif +#endif + #if !defined( VULKAN_HPP_NOEXCEPT ) # if defined( _MSC_VER ) && ( _MSC_VER <= 1800 ) # define VULKAN_HPP_NOEXCEPT @@ -4904,6 +4912,37 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); } + //=== VK_EXT_host_image_copy === + + VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); + } + + VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); + } + + VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); + } + + VkResult + vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT * pTransitions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); + } + + void vkGetImageSubresourceLayout2EXT( VkDevice device, + VkImage image, + const VkImageSubresource2EXT * pSubresource, + VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); + } + //=== VK_KHR_map_memory2 === VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfoKHR * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT @@ -5237,16 +5276,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); } - //=== VK_EXT_image_compression_control === - - void vkGetImageSubresourceLayout2EXT( VkDevice device, - VkImage image, - const VkImageSubresource2EXT * pSubresource, - VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); - } - //=== VK_EXT_device_fault === VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT @@ -5687,6 +5716,26 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); } + //=== VK_NV_device_generated_commands_compute === + + void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device, + const VkComputePipelineCreateInfo * pCreateInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); + } + + void + vkCmdUpdatePipelineIndirectBuffer( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdatePipelineIndirectBuffer( commandBuffer, pipelineBindPoint, pipeline ); + } + + 
VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); + } + //=== VK_EXT_extended_dynamic_state3 === void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT @@ -6874,32 +6923,32 @@ namespace VULKAN_HPP_NAMESPACE //========================================= //=== CONSTEXPR CONSTANTs AND FUNCTIONs === //========================================= - VULKAN_HPP_CONSTEXPR uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED; - VULKAN_HPP_CONSTEXPR uint32_t False = VK_FALSE; - VULKAN_HPP_CONSTEXPR float LodClampNone = VK_LOD_CLAMP_NONE; - VULKAN_HPP_CONSTEXPR uint32_t LuidSize = VK_LUID_SIZE; - VULKAN_HPP_CONSTEXPR uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE; - VULKAN_HPP_CONSTEXPR uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE; - VULKAN_HPP_CONSTEXPR uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE; - VULKAN_HPP_CONSTEXPR uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE; - VULKAN_HPP_CONSTEXPR uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE; - VULKAN_HPP_CONSTEXPR uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; - VULKAN_HPP_CONSTEXPR uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS; - VULKAN_HPP_CONSTEXPR uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES; - VULKAN_HPP_CONSTEXPR uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; - VULKAN_HPP_CONSTEXPR uint32_t MaxShaderModuleIdentifierSizeExt = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; - VULKAN_HPP_CONSTEXPR uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL; - VULKAN_HPP_CONSTEXPR uint32_t QueueFamilyForeignExt = VK_QUEUE_FAMILY_FOREIGN_EXT; - VULKAN_HPP_CONSTEXPR uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED; - VULKAN_HPP_CONSTEXPR uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT; - VULKAN_HPP_CONSTEXPR uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS; - VULKAN_HPP_CONSTEXPR uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS; - VULKAN_HPP_CONSTEXPR uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR; - VULKAN_HPP_CONSTEXPR uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL; - VULKAN_HPP_CONSTEXPR uint32_t True = VK_TRUE; - VULKAN_HPP_CONSTEXPR uint32_t UuidSize = VK_UUID_SIZE; - VULKAN_HPP_CONSTEXPR uint64_t WholeSize = VK_WHOLE_SIZE; - VULKAN_HPP_CONSTEXPR uint32_t HeaderVersion = VK_HEADER_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t False = VK_FALSE; + VULKAN_HPP_CONSTEXPR_INLINE float LodClampNone = VK_LOD_CLAMP_NONE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSize = VK_LUID_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPhysicalDeviceNameSize = 
VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeExt = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyForeignExt = VK_QUEUE_FAMILY_FOREIGN_EXT; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t True = VK_TRUE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t UuidSize = VK_UUID_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint64_t WholeSize = VK_WHOLE_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION; template ::value>::type> VULKAN_HPP_CONSTEXPR uint32_t apiVersionMajor( T const version ) { @@ -6949,12 +6998,12 @@ namespace VULKAN_HPP_NAMESPACE { return ( ( uint32_t )(version)&0xFFFU ); } - VULKAN_HPP_CONSTEXPR auto ApiVersion = makeApiVersion( 0, 1, 0, 0 ); - VULKAN_HPP_CONSTEXPR auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 ); - VULKAN_HPP_CONSTEXPR auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 ); - VULKAN_HPP_CONSTEXPR auto ApiVersion12 = makeApiVersion( 0, 1, 2, 0 ); - VULKAN_HPP_CONSTEXPR auto ApiVersion13 = makeApiVersion( 0, 1, 3, 0 ); - VULKAN_HPP_CONSTEXPR auto HeaderVersionComplete = makeApiVersion( 0, 1, 3, VK_HEADER_VERSION ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion = makeApiVersion( 0, 1, 0, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion12 = makeApiVersion( 0, 1, 2, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion13 = makeApiVersion( 0, 1, 3, 0 ); + VULKAN_HPP_CONSTEXPR_INLINE auto HeaderVersionComplete = makeApiVersion( 0, 1, 3, VK_HEADER_VERSION ); } // namespace VULKAN_HPP_NAMESPACE @@ -10604,6 +10653,48 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_host_image_copy === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_EXT_shader_atomic_float2 === template <> struct StructExtends @@ -12504,6 +12595,24 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_NV_device_generated_commands_compute === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_NV_linear_color_attachment === template <> struct StructExtends @@ -14064,6 +14173,13 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + //=== VK_EXT_host_image_copy === + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT 
vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; + //=== VK_KHR_map_memory2 === PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; @@ -14149,9 +14265,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; - //=== VK_EXT_image_compression_control === - PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; - //=== VK_EXT_device_fault === PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; @@ -14288,6 +14401,11 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + //=== VK_NV_device_generated_commands_compute === + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBuffer vkCmdUpdatePipelineIndirectBuffer = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; + //=== VK_EXT_extended_dynamic_state3 === PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; @@ -15325,6 +15443,13 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); + //=== VK_KHR_map_memory2 === vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) ); @@ -15446,9 +15571,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdResolveImage2 ) vkCmdResolveImage2 = vkCmdResolveImage2KHR; - //=== VK_EXT_image_compression_control === - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); - //=== VK_EXT_device_fault === vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) ); @@ -15589,6 +15711,13 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) ); + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBuffer = PFN_vkCmdUpdatePipelineIndirectBuffer( vkGetInstanceProcAddr( 
instance, "vkCmdUpdatePipelineIndirectBuffer" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectDeviceAddressNV" ) ); + //=== VK_EXT_extended_dynamic_state3 === vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) ); vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) ); @@ -16331,6 +16460,13 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + //=== VK_KHR_map_memory2 === vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); @@ -16446,9 +16582,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdResolveImage2 ) vkCmdResolveImage2 = vkCmdResolveImage2KHR; - //=== VK_EXT_image_compression_control === - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); - //=== VK_EXT_device_fault === vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); @@ -16567,6 +16700,12 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBuffer = PFN_vkCmdUpdatePipelineIndirectBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBuffer" ) ); + vkGetPipelineIndirectDeviceAddressNV = PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + //=== VK_EXT_extended_dynamic_state3 === vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); diff --git a/include/vulkan/vulkan_beta.h b/include/vulkan/vulkan_beta.h index d2bcf4d..75fabd4 100644 --- a/include/vulkan/vulkan_beta.h +++ b/include/vulkan/vulkan_beta.h @@ -645,7 +645,7 @@ typedef struct VkVideoEncodeH265GopRemainingFrameInfoEXT { // 
VK_NV_displacement_micromap is a preprocessor guard. Do not pass it to API calls. #define VK_NV_displacement_micromap 1 -#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 1 +#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 2 #define VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME "VK_NV_displacement_micromap" typedef enum VkDisplacementMicromapFormatNV { diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h index cf85442..b635c86 100644 --- a/include/vulkan/vulkan_core.h +++ b/include/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 257 +#define VK_HEADER_VERSION 258 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -800,6 +800,16 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT = 1000270000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT = 1000270001, + VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT = 1000270002, + VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT = 1000270003, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT = 1000270004, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT = 1000270005, + VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT = 1000270006, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT = 1000270007, + VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT = 1000270008, + VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT = 1000270009, VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR = 1000271000, VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR = 1000271001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT = 1000273000, @@ -1024,6 +1034,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV = 1000426001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV = 1000427000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV = 1000427001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV = 1000428000, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV = 1000428001, + VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV = 1000428002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT = 1000437000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000, @@ -2291,6 +2304,7 @@ typedef enum VkImageUsageFlagBits { VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000, VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100, + VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT = 0x00400000, #ifdef VK_ENABLE_BETA_EXTENSIONS VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000, #endif @@ -2681,6 +2695,7 @@ typedef enum VkDescriptorSetLayoutCreateFlagBits { VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00000010, VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT = 0x00000020, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV = 
0x00000080, VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004, VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, @@ -6735,6 +6750,7 @@ static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_K static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT = 0x400000000000ULL; #ifdef VK_ENABLE_BETA_EXTENSIONS static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL; #endif @@ -13874,6 +13890,153 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( #endif +// VK_EXT_host_image_copy is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_host_image_copy 1 +#define VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION 1 +#define VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME "VK_EXT_host_image_copy" + +typedef enum VkHostImageCopyFlagBitsEXT { + VK_HOST_IMAGE_COPY_MEMCPY_EXT = 0x00000001, + VK_HOST_IMAGE_COPY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkHostImageCopyFlagBitsEXT; +typedef VkFlags VkHostImageCopyFlagsEXT; +typedef struct VkPhysicalDeviceHostImageCopyFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 hostImageCopy; +} VkPhysicalDeviceHostImageCopyFeaturesEXT; + +typedef struct VkPhysicalDeviceHostImageCopyPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t copySrcLayoutCount; + VkImageLayout* pCopySrcLayouts; + uint32_t copyDstLayoutCount; + VkImageLayout* pCopyDstLayouts; + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; + VkBool32 identicalMemoryTypeRequirements; +} VkPhysicalDeviceHostImageCopyPropertiesEXT; + +typedef struct VkMemoryToImageCopyEXT { + VkStructureType sType; + const void* pNext; + const void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkMemoryToImageCopyEXT; + +typedef struct VkImageToMemoryCopyEXT { + VkStructureType sType; + const void* pNext; + void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkImageToMemoryCopyEXT; + +typedef struct VkCopyMemoryToImageInfoEXT { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlagsEXT flags; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkMemoryToImageCopyEXT* pRegions; +} VkCopyMemoryToImageInfoEXT; + +typedef struct VkCopyImageToMemoryInfoEXT { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlagsEXT flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + uint32_t regionCount; + const VkImageToMemoryCopyEXT* pRegions; +} VkCopyImageToMemoryInfoEXT; + +typedef struct VkCopyImageToImageInfoEXT { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlagsEXT flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2* pRegions; +} VkCopyImageToImageInfoEXT; + 
+typedef struct VkHostImageLayoutTransitionInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; + VkImageLayout oldLayout; + VkImageLayout newLayout; + VkImageSubresourceRange subresourceRange; +} VkHostImageLayoutTransitionInfoEXT; + +typedef struct VkSubresourceHostMemcpySizeEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize size; +} VkSubresourceHostMemcpySizeEXT; + +typedef struct VkHostImageCopyDevicePerformanceQueryEXT { + VkStructureType sType; + void* pNext; + VkBool32 optimalDeviceAccess; + VkBool32 identicalMemoryLayout; +} VkHostImageCopyDevicePerformanceQueryEXT; + +typedef struct VkSubresourceLayout2EXT { + VkStructureType sType; + void* pNext; + VkSubresourceLayout subresourceLayout; +} VkSubresourceLayout2EXT; + +typedef struct VkImageSubresource2EXT { + VkStructureType sType; + void* pNext; + VkImageSubresource imageSubresource; +} VkImageSubresource2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT* pTransitions); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT( + VkDevice device, + const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemoryEXT( + VkDevice device, + const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImageEXT( + VkDevice device, + const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT( + VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfoEXT* pTransitions); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( + VkDevice device, + VkImage image, + const VkImageSubresource2EXT* pSubresource, + VkSubresourceLayout2EXT* pLayout); +#endif + + // VK_EXT_shader_atomic_float2 is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_shader_atomic_float2 1 #define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1 @@ -14022,6 +14185,8 @@ typedef enum VkIndirectCommandsTokenTypeNV { VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV = 1000428003, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV = 1000428004, VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF } VkIndirectCommandsTokenTypeNV; @@ -15057,18 +15222,6 @@ typedef struct VkImageCompressionControlEXT { VkImageCompressionFixedRateFlagsEXT* pFixedRateFlags; } VkImageCompressionControlEXT; -typedef struct VkSubresourceLayout2EXT { - VkStructureType sType; - void* pNext; - VkSubresourceLayout subresourceLayout; -} VkSubresourceLayout2EXT; - -typedef struct VkImageSubresource2EXT { - VkStructureType sType; - void* pNext; - VkImageSubresource imageSubresource; -} VkImageSubresource2EXT; - typedef struct VkImageCompressionPropertiesEXT { VkStructureType sType; void* pNext; @@ -15076,15 +15229,6 @@ typedef struct VkImageCompressionPropertiesEXT { VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags; } VkImageCompressionPropertiesEXT; -typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( - VkDevice device, - VkImage image, - const VkImageSubresource2EXT* pSubresource, - VkSubresourceLayout2EXT* pLayout); -#endif // VK_EXT_attachment_feedback_loop_layout is a preprocessor guard. Do not pass it to API calls. @@ -16283,6 +16427,58 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV( #endif +// VK_NV_device_generated_commands_compute is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_device_generated_commands_compute 1 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION 1 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME "VK_NV_device_generated_commands_compute" +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCompute; + VkBool32 deviceGeneratedComputePipelines; + VkBool32 deviceGeneratedComputeCaptureReplay; +} VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + +typedef struct VkComputePipelineIndirectBufferInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; + VkDeviceAddress pipelineDeviceAddressCaptureReplay; +} VkComputePipelineIndirectBufferInfoNV; + +typedef struct VkPipelineIndirectDeviceAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; +} VkPipelineIndirectDeviceAddressInfoNV; + +typedef struct VkBindPipelineIndirectCommandNV { + VkDeviceAddress pipelineAddress; +} VkBindPipelineIndirectCommandNV; + +typedef void (VKAPI_PTR *PFN_vkGetPipelineIndirectMemoryRequirementsNV)(VkDevice device, const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdUpdatePipelineIndirectBuffer)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetPipelineIndirectDeviceAddressNV)(VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPipelineIndirectMemoryRequirementsNV( + VkDevice device, + const VkComputePipelineCreateInfo* pCreateInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdatePipelineIndirectBuffer( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetPipelineIndirectDeviceAddressNV( + VkDevice device, + const VkPipelineIndirectDeviceAddressInfoNV* pInfo); +#endif + + // VK_NV_linear_color_attachment is a preprocessor guard. Do not pass it to API calls. 
#define VK_NV_linear_color_attachment 1 #define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1 diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp index f5b118c..27cb036 100644 --- a/include/vulkan/vulkan_enums.hpp +++ b/include/vulkan/vulkan_enums.hpp @@ -801,6 +801,16 @@ namespace VULKAN_HPP_NAMESPACE ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, + ePhysicalDeviceHostImageCopyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT, + ePhysicalDeviceHostImageCopyPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT, + eMemoryToImageCopyEXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT, + eImageToMemoryCopyEXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT, + eCopyImageToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT, + eCopyMemoryToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT, + eHostImageLayoutTransitionInfoEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT, + eCopyImageToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT, + eSubresourceHostMemcpySizeEXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT, + eHostImageCopyDevicePerformanceQueryEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT, eMemoryMapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR, eMemoryUnmapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR, ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT, @@ -1049,6 +1059,9 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV, ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV, ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV, + ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV, + eComputePipelineIndirectBufferInfoNV = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV, + ePipelineIndirectDeviceAddressInfoNV = VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV, ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV, ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT, ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM, @@ -1641,6 +1654,7 @@ namespace VULKAN_HPP_NAMESPACE eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT, #if defined( VK_ENABLE_BETA_EXTENSIONS ) eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, @@ -1662,7 +1676,8 @@ namespace VULKAN_HPP_NAMESPACE ImageUsageFlagBits::eTransferSrc | ImageUsageFlagBits::eTransferDst | ImageUsageFlagBits::eSampled | ImageUsageFlagBits::eStorage | 
ImageUsageFlagBits::eColorAttachment | ImageUsageFlagBits::eDepthStencilAttachment | ImageUsageFlagBits::eTransientAttachment | ImageUsageFlagBits::eInputAttachment | ImageUsageFlagBits::eVideoDecodeDstKHR | ImageUsageFlagBits::eVideoDecodeSrcKHR | - ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR + ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR | + ImageUsageFlagBits::eHostTransferEXT #if defined( VK_ENABLE_BETA_EXTENSIONS ) | ImageUsageFlagBits::eVideoEncodeDstKHR | ImageUsageFlagBits::eVideoEncodeSrcKHR | ImageUsageFlagBits::eVideoEncodeDpbKHR #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -2856,6 +2871,7 @@ namespace VULKAN_HPP_NAMESPACE eDescriptorBufferEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, eEmbeddedImmutableSamplersEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT, eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, + eIndirectBindableNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV, eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT }; @@ -2868,7 +2884,7 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorSetLayoutCreateFlags allFlags = DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool | DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR | DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT | DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT | - DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT; + DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT; }; enum class DescriptorType @@ -3942,6 +3958,7 @@ namespace VULKAN_HPP_NAMESPACE eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, eFragmentDensityMapEXT = VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eHostImageTransferEXT = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT, #if defined( VK_ENABLE_BETA_EXTENSIONS ) eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR, eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR, @@ -3977,7 +3994,7 @@ namespace VULKAN_HPP_NAMESPACE FormatFeatureFlagBits2::eCositedChromaSamples | FormatFeatureFlagBits2::eStorageReadWithoutFormat | FormatFeatureFlagBits2::eStorageWriteWithoutFormat | FormatFeatureFlagBits2::eSampledImageDepthComparison | FormatFeatureFlagBits2::eVideoDecodeOutputKHR | FormatFeatureFlagBits2::eVideoDecodeDpbKHR | FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR | FormatFeatureFlagBits2::eFragmentDensityMapEXT | - FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR + FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits2::eHostImageTransferEXT #if defined( VK_ENABLE_BETA_EXTENSIONS ) | FormatFeatureFlagBits2::eVideoEncodeInputKHR | FormatFeatureFlagBits2::eVideoEncodeDpbKHR #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -5704,6 +5721,22 @@ namespace VULKAN_HPP_NAMESPACE eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR }; + //=== VK_EXT_host_image_copy === + + enum class HostImageCopyFlagBitsEXT : VkHostImageCopyFlagsEXT + { + eMemcpy = VK_HOST_IMAGE_COPY_MEMCPY_EXT + }; + + using HostImageCopyFlagsEXT = Flags; + + template <> + struct 
FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR HostImageCopyFlagsEXT allFlags = HostImageCopyFlagBitsEXT::eMemcpy; + }; + //=== VK_KHR_map_memory2 === enum class MemoryUnmapFlagBitsKHR : VkMemoryUnmapFlagsKHR @@ -5781,7 +5814,9 @@ namespace VULKAN_HPP_NAMESPACE eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV, - eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV + eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV, + ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV }; enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV diff --git a/include/vulkan/vulkan_extension_inspection.hpp b/include/vulkan/vulkan_extension_inspection.hpp index 299fabc..5084b29 100644 --- a/include/vulkan/vulkan_extension_inspection.hpp +++ b/include/vulkan/vulkan_extension_inspection.hpp @@ -269,6 +269,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_EXT_extended_dynamic_state", "VK_KHR_deferred_host_operations", "VK_KHR_pipeline_executable_properties", +"VK_EXT_host_image_copy", "VK_KHR_map_memory2", "VK_EXT_shader_atomic_float2", "VK_EXT_swapchain_maintenance1", @@ -366,6 +367,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_QCOM_fragment_density_map_offset", "VK_NV_copy_memory_indirect", "VK_NV_memory_decompression", +"VK_NV_device_generated_commands_compute", "VK_NV_linear_color_attachment", "VK_EXT_image_compression_control_swapchain", "VK_QCOM_image_processing", @@ -663,6 +665,7 @@ namespace VULKAN_HPP_NAMESPACE { "VK_EXT_index_type_uint8", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } }, { "VK_EXT_extended_dynamic_state", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } }, { "VK_KHR_pipeline_executable_properties", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } }, +{ "VK_EXT_host_image_copy", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_copy_commands2", "VK_KHR_format_feature_flags2", } } } } }, { "VK_EXT_shader_atomic_float2", { { "VK_VERSION_1_0", { { "VK_EXT_shader_atomic_float", } } } } }, { "VK_EXT_surface_maintenance1", { { "VK_VERSION_1_0", { { "VK_KHR_surface", "VK_KHR_get_surface_capabilities2", } } } } }, { "VK_EXT_swapchain_maintenance1", { { "VK_VERSION_1_0", { { "VK_KHR_swapchain", "VK_EXT_surface_maintenance1", "VK_KHR_get_physical_device_properties2", } } } } }, @@ -758,6 +761,7 @@ namespace VULKAN_HPP_NAMESPACE { "VK_QCOM_fragment_density_map_offset", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_EXT_fragment_density_map", } } } } }, { "VK_NV_copy_memory_indirect", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_buffer_device_address", } } } } }, { "VK_NV_memory_decompression", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_buffer_device_address", } } } } }, +{ "VK_NV_device_generated_commands_compute", { { "VK_VERSION_1_0", { { "VK_NV_device_generated_commands", } } } } }, { "VK_NV_linear_color_attachment", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } }, { "VK_GOOGLE_surfaceless_query", { { "VK_VERSION_1_0", { { "VK_KHR_surface", } } } } }, { "VK_EXT_image_compression_control_swapchain", { { 
"VK_VERSION_1_0", { { "VK_EXT_image_compression_control", } } } } }, @@ -1432,7 +1436,7 @@ namespace VULKAN_HPP_NAMESPACE || ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_shader_atomic_float" ) || ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_KHR_deferred_host_operations" ) || - ( extension == "VK_KHR_pipeline_executable_properties" ) || ( extension == "VK_KHR_map_memory2" ) || + ( extension == "VK_KHR_pipeline_executable_properties" ) || ( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_shader_atomic_float2" ) || ( extension == "VK_EXT_swapchain_maintenance1" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_NV_device_generated_commands" ) || ( extension == "VK_NV_inherited_viewport_scissor" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) || @@ -1483,7 +1487,8 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_ARM_shader_core_properties" ) || ( extension == "VK_EXT_image_sliced_view_of_3d" ) || ( extension == "VK_VALVE_descriptor_set_host_mapping" ) || ( extension == "VK_EXT_depth_clamp_zero_one" ) || ( extension == "VK_EXT_non_seamless_cube_map" ) || ( extension == "VK_QCOM_fragment_density_map_offset" ) || - ( extension == "VK_NV_copy_memory_indirect" ) || ( extension == "VK_NV_memory_decompression" ) || ( extension == "VK_NV_linear_color_attachment" ) || + ( extension == "VK_NV_copy_memory_indirect" ) || ( extension == "VK_NV_memory_decompression" ) || + ( extension == "VK_NV_device_generated_commands_compute" ) || ( extension == "VK_NV_linear_color_attachment" ) || ( extension == "VK_EXT_image_compression_control_swapchain" ) || ( extension == "VK_QCOM_image_processing" ) || ( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) || ( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) || diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp index 6b43434..b8cee1b 100644 --- a/include/vulkan/vulkan_funcs.hpp +++ b/include/vulkan/vulkan_funcs.hpp @@ -18008,6 +18008,146 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_host_image_copy === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast( pCopyMemoryToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast( ©MemoryToImageInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast( pCopyImageToMemoryInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast( ©ImageToMemoryInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCopyImageToImageEXT( m_device, reinterpret_cast( pCopyImageToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = d.vkCopyImageToImageEXT( m_device, reinterpret_cast( ©ImageToImageInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast( pTransitions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkResult result = + d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast( transitions.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); + + return createResultValueType( static_cast( result ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout2EXT( m_device, + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout; + d.vkGetImageSubresourceLayout2EXT( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2EXT( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get(); + d.vkGetImageSubresourceLayout2EXT( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_map_memory2 === template @@ -19429,54 +19569,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_image_compression_control === - - template - VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout2EXT( m_device, - static_cast( image ), - reinterpret_cast( pSubresource ), - reinterpret_cast( pLayout ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout; - d.vkGetImageSubresourceLayout2EXT( m_device, - static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); - - return layout; - } - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2EXT( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - - VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get(); - d.vkGetImageSubresourceLayout2EXT( m_device, - static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); - - return structureChain; - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== 
VK_EXT_device_fault === template @@ -21107,6 +21199,80 @@ namespace VULKAN_HPP_NAMESPACE m_commandBuffer, static_cast( indirectCommandsAddress ), static_cast( indirectCommandsCountAddress ), stride ); } + //=== VK_NV_device_generated_commands_compute === + + template + VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetPipelineIndirectMemoryRequirementsNV( + m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetPipelineIndirectMemoryRequirementsNV( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetPipelineIndirectMemoryRequirementsNV( + m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdUpdatePipelineIndirectBuffer( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + template + VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast( pInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast( &info ) ); + + return static_cast( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_extended_dynamic_state3 === template diff --git a/include/vulkan/vulkan_handles.hpp b/include/vulkan/vulkan_handles.hpp index 
9981076..f0545d9 100644 --- a/include/vulkan/vulkan_handles.hpp +++ b/include/vulkan/vulkan_handles.hpp @@ -1141,6 +1141,20 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineExecutableStatisticKHR; struct PipelineExecutableInternalRepresentationKHR; + //=== VK_EXT_host_image_copy === + struct PhysicalDeviceHostImageCopyFeaturesEXT; + struct PhysicalDeviceHostImageCopyPropertiesEXT; + struct MemoryToImageCopyEXT; + struct ImageToMemoryCopyEXT; + struct CopyMemoryToImageInfoEXT; + struct CopyImageToMemoryInfoEXT; + struct CopyImageToImageInfoEXT; + struct HostImageLayoutTransitionInfoEXT; + struct SubresourceHostMemcpySizeEXT; + struct HostImageCopyDevicePerformanceQueryEXT; + struct SubresourceLayout2EXT; + struct ImageSubresource2EXT; + //=== VK_KHR_map_memory2 === struct MemoryMapInfoKHR; struct MemoryUnmapInfoKHR; @@ -1328,8 +1342,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_image_compression_control === struct PhysicalDeviceImageCompressionControlFeaturesEXT; struct ImageCompressionControlEXT; - struct SubresourceLayout2EXT; - struct ImageSubresource2EXT; struct ImageCompressionPropertiesEXT; //=== VK_EXT_attachment_feedback_loop_layout === @@ -1523,6 +1535,12 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceMemoryDecompressionFeaturesNV; struct PhysicalDeviceMemoryDecompressionPropertiesNV; + //=== VK_NV_device_generated_commands_compute === + struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + struct ComputePipelineIndirectBufferInfoNV; + struct PipelineIndirectDeviceAddressInfoNV; + struct BindPipelineIndirectCommandNV; + //=== VK_NV_linear_color_attachment === struct PhysicalDeviceLinearColorAttachmentFeaturesNV; @@ -5945,6 +5963,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_NV_device_generated_commands_compute === + + template + void updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_extended_dynamic_state3 === template @@ -11874,6 +11899,67 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_host_image_copy === + + template + VULKAN_HPP_NODISCARD Result copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + 
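// --------------------------------------------------------------------------------------------
// Editorial aside, not part of the generated header: a sketch of a host read-back using the
// declarations added here, sized via the new SubresourceHostMemcpySizeEXT query. Assumptions
// (all names hypothetical): the hostImageCopy feature is enabled, `image` was created with
// vk::ImageUsageFlagBits::eHostTransferEXT, it is currently in eGeneral layout, and eGeneral
// appears in vk::PhysicalDeviceHostImageCopyPropertiesEXT::pCopySrcLayouts.

#include <vulkan/vulkan.hpp>

#include <cstddef>
#include <vector>

std::vector<std::byte> readBackMipZero( vk::Device device, vk::Image image, vk::Extent3D mipZeroExtent )
{
  // Chain SubresourceHostMemcpySizeEXT behind SubresourceLayout2EXT to learn how many bytes
  // a raw (eMemcpy) host copy of mip 0 / layer 0 needs.
  vk::ImageSubresource2EXT subresource{};
  subresource.imageSubresource = { vk::ImageAspectFlagBits::eColor, 0, 0 };

  auto chain =
    device.getImageSubresourceLayout2EXT<vk::SubresourceLayout2EXT, vk::SubresourceHostMemcpySizeEXT>( image, subresource );
  vk::DeviceSize byteSize = chain.get<vk::SubresourceHostMemcpySizeEXT>().size;

  std::vector<std::byte> data( static_cast<size_t>( byteSize ) );

  // With eMemcpy the data is copied in the implementation's own layout: row length and image
  // height stay 0 and the region has to cover the whole subresource.
  vk::ImageToMemoryCopyEXT region{};
  region.pHostPointer      = data.data();
  region.memoryRowLength   = 0;
  region.memoryImageHeight = 0;
  region.imageSubresource  = { vk::ImageAspectFlagBits::eColor, 0, 0, 1 };
  region.imageExtent       = mipZeroExtent;

  vk::CopyImageToMemoryInfoEXT copyInfo{};
  copyInfo.flags          = vk::HostImageCopyFlagBitsEXT::eMemcpy;
  copyInfo.srcImage       = image;
  copyInfo.srcImageLayout = vk::ImageLayout::eGeneral;
  copyInfo.regionCount    = 1;
  copyInfo.pRegions       = &region;
  device.copyImageToMemoryEXT( copyInfo );

  return data;
}
// --------------------------------------------------------------------------------------------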
template + VULKAN_HPP_NODISCARD Result copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result transitionImageLayoutEXT( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT + getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_map_memory2 === template @@ -12187,26 +12273,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_image_compression_control === - - template - void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT - getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain - getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_device_fault === template @@ -12616,6 +12682,32 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_NV_device_generated_commands_compute === + + template + void getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_shader_module_identifier === template diff --git a/include/vulkan/vulkan_hash.hpp b/include/vulkan/vulkan_hash.hpp index 7cec116..f7305e7 100644 --- a/include/vulkan/vulkan_hash.hpp +++ b/include/vulkan/vulkan_hash.hpp @@ -1373,6 +1373,17 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV const & bindPipelineIndirectCommandNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindPipelineIndirectCommandNV.pipelineAddress ); + return seed; + } + }; + template <> struct hash { @@ -2273,6 +2284,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV const & computePipelineIndirectBufferInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.deviceAddress ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.size ); + VULKAN_HPP_HASH_COMBINE( seed, computePipelineIndirectBufferInfoNV.pipelineDeviceAddressCaptureReplay ); + return seed; + } + }; + template <> struct hash { @@ -2475,6 +2501,60 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT const & copyImageToImageInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.srcImageLayout ); + 
VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.pRegions ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT const & imageToMemoryCopyEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT const & copyImageToMemoryInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.pRegions ); + return seed; + } + }; + template <> struct hash { @@ -2504,6 +2584,41 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT const & memoryToImageCopyEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT const & copyMemoryToImageInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.pRegions ); + return seed; + } + }; + template <> struct hash { @@ -5148,6 +5263,37 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT const & 
hostImageCopyDevicePerformanceQueryEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.optimalDeviceAccess ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.identicalMemoryLayout ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT const & hostImageLayoutTransitionInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.image ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.oldLayout ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.newLayout ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.subresourceRange ); + return seed; + } + }; + # if defined( VK_USE_PLATFORM_IOS_MVK ) template <> struct hash @@ -7522,6 +7668,22 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & + physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedCompute ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputePipelines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.deviceGeneratedComputeCaptureReplay ); + return seed; + } + }; + template <> struct hash { @@ -8249,6 +8411,42 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT const & physicalDeviceHostImageCopyFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.hostImageCopy ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT const & physicalDeviceHostImageCopyPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.copySrcLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pCopySrcLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.copyDstLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pCopyDstLayouts ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceHostImageCopyPropertiesEXT.optimalTilingLayoutUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.identicalMemoryTypeRequirements ); + return seed; + } + }; + template <> struct hash { @@ -11486,6 +11684,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV const & pipelineIndirectDeviceAddressInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipelineBindPoint ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineIndirectDeviceAddressInfoNV.pipeline ); + return seed; + } + }; + template <> struct hash { @@ -13353,6 +13565,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT const & subresourceHostMemcpySizeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.size ); + return seed; + } + }; + template <> struct hash { diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp index 9c1939c..df3353f 100644 --- a/include/vulkan/vulkan_raii.hpp +++ b/include/vulkan/vulkan_raii.hpp @@ -1331,6 +1331,13 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + //=== VK_KHR_map_memory2 === vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); @@ -1446,9 +1453,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdResolveImage2 ) vkCmdResolveImage2 = vkCmdResolveImage2KHR; - //=== VK_EXT_image_compression_control === - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); - //=== VK_EXT_device_fault === vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); @@ -1569,6 +1573,13 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, 
"vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBuffer = PFN_vkCmdUpdatePipelineIndirectBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBuffer" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + //=== VK_EXT_extended_dynamic_state3 === vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); @@ -2195,6 +2206,13 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; + //=== VK_EXT_host_image_copy === + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; + //=== VK_KHR_map_memory2 === PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; @@ -2274,9 +2292,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; - //=== VK_EXT_image_compression_control === - PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; - //=== VK_EXT_device_fault === PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; @@ -2386,6 +2401,11 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; + //=== VK_NV_device_generated_commands_compute === + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBuffer vkCmdUpdatePipelineIndirectBuffer = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; + //=== VK_EXT_extended_dynamic_state3 === PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; @@ -4022,6 +4042,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const; + //=== VK_EXT_host_image_copy === + + void copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const; + + void copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const; + + void copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const; + + void transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const; + //=== VK_KHR_map_memory2 === VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const; @@ -4209,6 +4239,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const 
VULKAN_HPP_NOEXCEPT; + //=== VK_NV_device_generated_commands_compute === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress + getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_shader_module_identifier === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT @@ -5883,6 +5925,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress, uint32_t stride ) const VULKAN_HPP_NOEXCEPT; + //=== VK_NV_device_generated_commands_compute === + + void updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_extended_dynamic_state3 === void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT; @@ -8103,7 +8150,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT() const; - //=== VK_EXT_image_compression_control === + //=== VK_EXT_host_image_copy === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT; @@ -18150,6 +18197,77 @@ namespace VULKAN_HPP_NAMESPACE return internalRepresentations; } + //=== VK_EXT_host_image_copy === + + VULKAN_HPP_INLINE void Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImageEXT && "Function requires " ); + + VkResult result = getDispatcher()->vkCopyMemoryToImageEXT( static_cast( m_device ), + reinterpret_cast( ©MemoryToImageInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); + } + + VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemoryEXT && "Function requires " ); + + VkResult result = getDispatcher()->vkCopyImageToMemoryEXT( static_cast( m_device ), + reinterpret_cast( ©ImageToMemoryInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); + } + + VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImageEXT && "Function requires " ); + + VkResult result = getDispatcher()->vkCopyImageToImageEXT( static_cast( m_device ), + reinterpret_cast( ©ImageToImageInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); + } + + VULKAN_HPP_INLINE void Device::transitionImageLayoutEXT( + VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayoutEXT && "Function requires " ); + + 
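// --------------------------------------------------------------------------------------------
// Editorial aside, not part of the generated header: the same extension through the RAII
// wrappers defined in this file. A rough sketch assuming hypothetical handles: `device` is a
// vk::raii::Device with hostImageCopy enabled, `src` and `dst` were created with
// vk::ImageUsageFlagBits::eHostTransferEXT and are in eGeneral layout (advertised in both
// pCopySrcLayouts and pCopyDstLayouts).

#include <vulkan/vulkan_raii.hpp>

void hostCopyMipZero( vk::raii::Device const & device, vk::Image src, vk::Image dst, vk::Extent3D extent )
{
  vk::ImageCopy2 region{};
  region.srcSubresource = { vk::ImageAspectFlagBits::eColor, 0, 0, 1 };
  region.dstSubresource = { vk::ImageAspectFlagBits::eColor, 0, 0, 1 };
  region.extent         = extent;

  vk::CopyImageToImageInfoEXT copyInfo{};
  copyInfo.srcImage       = src;
  copyInfo.srcImageLayout = vk::ImageLayout::eGeneral;
  copyInfo.dstImage       = dst;
  copyInfo.dstImageLayout = vk::ImageLayout::eGeneral;
  copyInfo.regionCount    = 1;
  copyInfo.pRegions       = &region;

  // The RAII wrapper returns void and throws on failure; pass *someRaiiImage wherever a
  // plain vk::Image handle is expected.
  device.copyImageToImageEXT( copyInfo );
}

// Before routing data through host copies, HostImageCopyDevicePerformanceQueryEXT can be
// chained into vk::ImageFormatProperties2 (vkGetPhysicalDeviceImageFormatProperties2) to
// check optimalDeviceAccess / identicalMemoryLayout for the intended format and usage.
// --------------------------------------------------------------------------------------------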
VkResult result = getDispatcher()->vkTransitionImageLayoutEXT( + static_cast( m_device ), transitions.size(), reinterpret_cast( transitions.data() ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout; + getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get(); + getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + //=== VK_KHR_map_memory2 === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const @@ -18837,40 +18955,6 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &resolveImageInfo ) ); } - //=== VK_EXT_image_compression_control === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT - Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && - "Function requires " ); - - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout; - getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), - static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); - - return layout; - } - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain - Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && - "Function requires " ); - - VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get(); - getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), - static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); - - return structureChain; - } - //=== VK_EXT_device_fault === VULKAN_HPP_NODISCARD @@ -19650,6 +19734,60 @@ namespace VULKAN_HPP_NAMESPACE stride ); } + //=== VK_NV_device_generated_commands_compute === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV && + "Function 
requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdatePipelineIndirectBuffer && + "Function requires " ); + + getDispatcher()->vkCmdUpdatePipelineIndirectBuffer( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress + Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectDeviceAddressNV && + "Function requires " ); + + VkDeviceAddress result = getDispatcher()->vkGetPipelineIndirectDeviceAddressNV( + static_cast( m_device ), reinterpret_cast( &info ) ); + + return static_cast( result ); + } + //=== VK_EXT_extended_dynamic_state3 === VULKAN_HPP_INLINE void diff --git a/include/vulkan/vulkan_static_assertions.hpp b/include/vulkan/vulkan_static_assertions.hpp index fb0b523..ba79591 100644 --- a/include/vulkan/vulkan_static_assertions.hpp +++ b/include/vulkan/vulkan_static_assertions.hpp @@ -4560,6 +4560,80 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" ); +//=== VK_EXT_host_image_copy === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostImageCopyFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostImageCopyPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT ) == sizeof( VkMemoryToImageCopyEXT ), "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryToImageCopyEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT ) == sizeof( VkImageToMemoryCopyEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageToMemoryCopyEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT ) == sizeof( VkCopyMemoryToImageInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryToImageInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT ) == sizeof( VkCopyImageToMemoryInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyImageToMemoryInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT ) == sizeof( VkCopyImageToImageInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyImageToImageInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT ) == sizeof( VkHostImageLayoutTransitionInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostImageLayoutTransitionInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT ) == sizeof( VkSubresourceHostMemcpySizeEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceHostMemcpySizeEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT ) == sizeof( VkHostImageCopyDevicePerformanceQueryEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostImageCopyDevicePerformanceQueryEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceLayout2EXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSubresource2EXT is not nothrow_move_constructible!" ); + //=== VK_KHR_map_memory2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR ) == sizeof( VkMemoryMapInfoKHR ), "struct and wrapper have different size!" ); @@ -5427,17 +5501,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "ImageCompressionControlEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "SubresourceLayout2EXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "ImageSubresource2EXT is not nothrow_move_constructible!" ); - VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT ) == sizeof( VkImageCompressionPropertiesEXT ), "struct and wrapper have different size!" ); VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -6222,6 +6285,36 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceMemoryDecompressionPropertiesNV is not nothrow_move_constructible!" ); +//=== VK_NV_device_generated_commands_compute === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV ) == sizeof( VkComputePipelineIndirectBufferInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ComputePipelineIndirectBufferInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV ) == sizeof( VkPipelineIndirectDeviceAddressInfoNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineIndirectDeviceAddressInfoNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV ) == sizeof( VkBindPipelineIndirectCommandNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindPipelineIndirectCommandNV is not nothrow_move_constructible!" ); + //=== VK_NV_linear_color_attachment === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) == diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp index ab71413..97dbee4 100644 --- a/include/vulkan/vulkan_structs.hpp +++ b/include/vulkan/vulkan_structs.hpp @@ -8173,6 +8173,84 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; }; + struct BindPipelineIndirectCommandNV + { + using NativeType = VkBindPipelineIndirectCommandNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineAddress( pipelineAddress_ ) + { + } + + VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindPipelineIndirectCommandNV( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + : BindPipelineIndirectCommandNV( *reinterpret_cast( &rhs ) ) + { + } + + BindPipelineIndirectCommandNV & operator=( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindPipelineIndirectCommandNV & operator=( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindPipelineIndirectCommandNV & setPipelineAddress( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ ) VULKAN_HPP_NOEXCEPT + { + pipelineAddress = pipelineAddress_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindPipelineIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindPipelineIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( pipelineAddress ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindPipelineIndirectCommandNV const & ) const = default; +#else + bool operator==( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( pipelineAddress == rhs.pipelineAddress ); +# endif + } + + bool operator!=( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress = {}; + }; + struct BindShaderGroupIndirectCommandNV { using NativeType = VkBindShaderGroupIndirectCommandNV; @@ -15440,6 +15518,127 @@ namespace VULKAN_HPP_NAMESPACE using 
Type = ComputePipelineCreateInfo; }; + struct ComputePipelineIndirectBufferInfoNV + { + using NativeType = VkComputePipelineIndirectBufferInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineIndirectBufferInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceAddress( deviceAddress_ ) + , size( size_ ) + , pipelineDeviceAddressCaptureReplay( pipelineDeviceAddressCaptureReplay_ ) + { + } + + VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ComputePipelineIndirectBufferInfoNV( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : ComputePipelineIndirectBufferInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + ComputePipelineIndirectBufferInfoNV & operator=( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ComputePipelineIndirectBufferInfoNV & operator=( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & + setPipelineDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + pipelineDeviceAddressCaptureReplay = pipelineDeviceAddressCaptureReplay_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkComputePipelineIndirectBufferInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkComputePipelineIndirectBufferInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceAddress, size, pipelineDeviceAddressCaptureReplay ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ComputePipelineIndirectBufferInfoNV const & ) const = default; +#else + bool operator==( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ) && + ( pipelineDeviceAddressCaptureReplay == rhs.pipelineDeviceAddressCaptureReplay ); +# endif + } + + bool operator!=( 
ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineIndirectBufferInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay = {}; + }; + + template <> + struct CppType + { + using Type = ComputePipelineIndirectBufferInfoNV; + }; + struct ConditionalRenderingBeginInfoEXT { using NativeType = VkConditionalRenderingBeginInfoEXT; @@ -17284,6 +17483,515 @@ namespace VULKAN_HPP_NAMESPACE }; using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; + struct CopyImageToImageInfoEXT + { + using NativeType = VkCopyImageToImageInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToImageInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyImageToImageInfoEXT( CopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToImageInfoEXT( VkCopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToImageInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageToImageInfoEXT & operator=( CopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyImageToImageInfoEXT & operator=( VkCopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
CopyImageToImageInfoEXT & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToImageInfoEXT & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyImageToImageInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToImageInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageToImageInfoEXT const & ) const = default; +#else + bool operator==( CopyImageToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && + ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToImageInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageToImageInfoEXT; + }; + + struct ImageToMemoryCopyEXT + { + using NativeType = VkImageToMemoryCopyEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::eImageToMemoryCopyEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageToMemoryCopyEXT( void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pHostPointer( pHostPointer_ ) + , memoryRowLength( memoryRowLength_ ) + , memoryImageHeight( memoryImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageToMemoryCopyEXT( ImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageToMemoryCopyEXT( VkImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageToMemoryCopyEXT( *reinterpret_cast( &rhs ) ) + { + } + + ImageToMemoryCopyEXT & operator=( ImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageToMemoryCopyEXT & operator=( VkImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT + { + memoryRowLength = memoryRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + memoryImageHeight = memoryImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageToMemoryCopyEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageToMemoryCopyEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageToMemoryCopyEXT const & ) const = default; +#else + bool operator==( ImageToMemoryCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( 
pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) && + ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( ImageToMemoryCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageToMemoryCopyEXT; + const void * pNext = {}; + void * pHostPointer = {}; + uint32_t memoryRowLength = {}; + uint32_t memoryImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = ImageToMemoryCopyEXT; + }; + + struct CopyImageToMemoryInfoEXT + { + using NativeType = VkCopyImageToMemoryInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToMemoryInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfoEXT( CopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyImageToMemoryInfoEXT( VkCopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToMemoryInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , srcImage( srcImage_ ) + , srcImageLayout( srcImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyImageToMemoryInfoEXT & operator=( CopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyImageToMemoryInfoEXT & operator=( VkCopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + { + srcImage = srcImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & 
setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + srcImageLayout = srcImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyImageToMemoryInfoEXT & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyImageToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyImageToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, srcImage, srcImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyImageToMemoryInfoEXT const & ) const = default; +#else + bool operator==( CopyImageToMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) && + ( srcImageLayout == rhs.srcImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyImageToMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToMemoryInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyImageToMemoryInfoEXT; + }; + struct CopyMemoryIndirectCommandNV { using NativeType = VkCopyMemoryIndirectCommandNV; @@ -17614,6 +18322,324 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; }; + struct MemoryToImageCopyEXT + { + using NativeType = VkMemoryToImageCopyEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryToImageCopyEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR MemoryToImageCopyEXT( const void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pHostPointer( pHostPointer_ ) + , memoryRowLength( memoryRowLength_ ) + , memoryImageHeight( 
memoryImageHeight_ ) + , imageSubresource( imageSubresource_ ) + , imageOffset( imageOffset_ ) + , imageExtent( imageExtent_ ) + { + } + + VULKAN_HPP_CONSTEXPR MemoryToImageCopyEXT( MemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + MemoryToImageCopyEXT( VkMemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : MemoryToImageCopyEXT( *reinterpret_cast( &rhs ) ) + { + } + + MemoryToImageCopyEXT & operator=( MemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + MemoryToImageCopyEXT & operator=( VkMemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setPHostPointer( const void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT + { + memoryRowLength = memoryRowLength_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT + { + memoryImageHeight = memoryImageHeight_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & + setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + { + imageOffset = imageOffset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkMemoryToImageCopyEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryToImageCopyEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( MemoryToImageCopyEXT const & ) const = default; +#else + bool operator==( MemoryToImageCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) && + ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && + ( imageExtent == rhs.imageExtent ); +# endif + } + + bool operator!=( MemoryToImageCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryToImageCopyEXT; + const void * pNext = {}; + const void * pHostPointer = {}; + uint32_t 
memoryRowLength = {}; + uint32_t memoryImageHeight = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; + }; + + template <> + struct CppType + { + using Type = MemoryToImageCopyEXT; + }; + + struct CopyMemoryToImageInfoEXT + { + using NativeType = VkCopyMemoryToImageInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( regionCount_ ) + , pRegions( pRegions_ ) + { + } + + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfoEXT( CopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CopyMemoryToImageInfoEXT( VkCopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToImageInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + CopyMemoryToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , dstImage( dstImage_ ) + , dstImageLayout( dstImageLayout_ ) + , regionCount( static_cast( regions_.size() ) ) + , pRegions( regions_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + CopyMemoryToImageInfoEXT & operator=( CopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CopyMemoryToImageInfoEXT & operator=( VkCopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + { + dstImage = dstImage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + { + dstImageLayout = dstImageLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = regionCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions_ ) VULKAN_HPP_NOEXCEPT + { + pRegions = pRegions_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
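
The VK_EXT_host_image_copy structures in this hunk are easiest to read next to a usage sketch. The snippet below is an illustration only, not generated code: it assumes a vk::Device and a vk::Image created with vk::ImageUsageFlagBits::eHostTransferEXT usage, and the transitionImageLayoutEXT / copyMemoryToImageEXT member functions are assumed to be the usual vulkan.hpp wrappers for the extension's vkTransitionImageLayoutEXT / vkCopyMemoryToImageEXT entry points, already loaded into the default dispatcher.

#include <vulkan/vulkan.hpp>

// Sketch: upload tightly packed host pixels into mip 0 / layer 0 of an image
// using VK_EXT_host_image_copy, with no command buffer and no staging buffer.
void uploadWithHostImageCopy( vk::Device device, vk::Image image, const void * pixels, uint32_t width, uint32_t height )
{
  // Host-side layout transition so the destination is in a host-copyable layout.
  vk::HostImageLayoutTransitionInfoEXT transition{};
  transition.image            = image;
  transition.oldLayout        = vk::ImageLayout::eUndefined;
  transition.newLayout        = vk::ImageLayout::eGeneral;
  transition.subresourceRange = { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };
  (void)device.transitionImageLayoutEXT( transition );

  // memoryRowLength / memoryImageHeight of 0 mean the host data is tightly packed.
  vk::MemoryToImageCopyEXT region{};
  region.pHostPointer     = pixels;
  region.imageSubresource = { vk::ImageAspectFlagBits::eColor, 0, 0, 1 };
  region.imageExtent      = vk::Extent3D{ width, height, 1 };

  vk::CopyMemoryToImageInfoEXT copyInfo{};
  copyInfo.dstImage       = image;
  copyInfo.dstImageLayout = vk::ImageLayout::eGeneral;
  copyInfo.regionCount    = 1;
  copyInfo.pRegions       = &region;
  (void)device.copyMemoryToImageEXT( copyInfo );
}
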
CopyMemoryToImageInfoEXT & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + { + regionCount = static_cast( regions_.size() ); + pRegions = regions_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCopyMemoryToImageInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCopyMemoryToImageInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, dstImage, dstImageLayout, regionCount, pRegions ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CopyMemoryToImageInfoEXT const & ) const = default; +#else + bool operator==( CopyMemoryToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstImage == rhs.dstImage ) && + ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); +# endif + } + + bool operator!=( CopyMemoryToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToImageInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions = {}; + }; + + template <> + struct CppType + { + using Type = CopyMemoryToImageInfoEXT; + }; + struct CopyMemoryToMicromapInfoEXT { using NativeType = VkCopyMemoryToMicromapInfoEXT; @@ -38861,6 +39887,224 @@ namespace VULKAN_HPP_NAMESPACE using Type = HeadlessSurfaceCreateInfoEXT; }; + struct HostImageCopyDevicePerformanceQueryEXT + { + using NativeType = VkHostImageCopyDevicePerformanceQueryEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageCopyDevicePerformanceQueryEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , optimalDeviceAccess( optimalDeviceAccess_ ) + , identicalMemoryLayout( identicalMemoryLayout_ ) + { + } + + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQueryEXT( HostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HostImageCopyDevicePerformanceQueryEXT( VkHostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HostImageCopyDevicePerformanceQueryEXT( *reinterpret_cast( &rhs ) ) + { + } + + HostImageCopyDevicePerformanceQueryEXT & operator=( HostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + HostImageCopyDevicePerformanceQueryEXT & operator=( VkHostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT 
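
HostImageCopyDevicePerformanceQueryEXT is an output structure: chained into vk::ImageFormatProperties2, it reports whether routing copies through the host would cost optimal device access for a given format and usage. The sketch below is an editorial illustration of that query, not part of this hunk; the physical device handle and the chosen format/usage are assumptions, and error handling is reduced to checking the returned vk::Result.

#include <vulkan/vulkan.hpp>

// Sketch: check whether host image copies keep optimal device access for a format.
bool hostCopyKeepsOptimalAccess( vk::PhysicalDevice physicalDevice, vk::Format format )
{
  vk::PhysicalDeviceImageFormatInfo2 formatInfo{};
  formatInfo.format = format;
  formatInfo.type   = vk::ImageType::e2D;
  formatInfo.tiling = vk::ImageTiling::eOptimal;
  formatInfo.usage  = vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eHostTransferEXT;

  vk::HostImageCopyDevicePerformanceQueryEXT perfQuery{};
  vk::ImageFormatProperties2 formatProps{};
  formatProps.pNext = &perfQuery;

  if ( physicalDevice.getImageFormatProperties2( &formatInfo, &formatProps ) != vk::Result::eSuccess )
  {
    return false;  // the format / usage combination is not supported at all
  }
  return perfQuery.optimalDeviceAccess == VK_TRUE;
}
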
+ { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkHostImageCopyDevicePerformanceQueryEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostImageCopyDevicePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, optimalDeviceAccess, identicalMemoryLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HostImageCopyDevicePerformanceQueryEXT const & ) const = default; +#else + bool operator==( HostImageCopyDevicePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimalDeviceAccess == rhs.optimalDeviceAccess ) && + ( identicalMemoryLayout == rhs.identicalMemoryLayout ); +# endif + } + + bool operator!=( HostImageCopyDevicePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageCopyDevicePerformanceQueryEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout = {}; + }; + + template <> + struct CppType + { + using Type = HostImageCopyDevicePerformanceQueryEXT; + }; + + struct HostImageLayoutTransitionInfoEXT + { + using NativeType = VkHostImageLayoutTransitionInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageLayoutTransitionInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , image( image_ ) + , oldLayout( oldLayout_ ) + , newLayout( newLayout_ ) + , subresourceRange( subresourceRange_ ) + { + } + + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfoEXT( HostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HostImageLayoutTransitionInfoEXT( VkHostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : HostImageLayoutTransitionInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + HostImageLayoutTransitionInfoEXT & operator=( HostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + HostImageLayoutTransitionInfoEXT & operator=( VkHostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
HostImageLayoutTransitionInfoEXT & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + { + oldLayout = oldLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + { + newLayout = newLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & + setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT + { + subresourceRange = subresourceRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkHostImageLayoutTransitionInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHostImageLayoutTransitionInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, image, oldLayout, newLayout, subresourceRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HostImageLayoutTransitionInfoEXT const & ) const = default; +#else + bool operator==( HostImageLayoutTransitionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && + ( subresourceRange == rhs.subresourceRange ); +# endif + } + + bool operator!=( HostImageLayoutTransitionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageLayoutTransitionInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + + template <> + struct CppType + { + using Type = HostImageLayoutTransitionInfoEXT; + }; + #if defined( VK_USE_PLATFORM_IOS_MVK ) struct IOSSurfaceCreateInfoMVK { @@ -56538,6 +57782,133 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; }; + struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , deviceGeneratedCompute( deviceGeneratedCompute_ ) + , deviceGeneratedComputePipelines( deviceGeneratedComputePipelines_ ) + , deviceGeneratedComputeCaptureReplay( deviceGeneratedComputeCaptureReplay_ ) + { + } + + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + operator=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + operator=( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedCompute( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCompute = deviceGeneratedCompute_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedComputePipelines( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedComputePipelines = deviceGeneratedComputePipelines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & + setDeviceGeneratedComputeCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedComputeCaptureReplay = deviceGeneratedComputeCaptureReplay_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCompute, deviceGeneratedComputePipelines, deviceGeneratedComputeCaptureReplay ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCompute == rhs.deviceGeneratedCompute ) && + ( deviceGeneratedComputePipelines == rhs.deviceGeneratedComputePipelines ) && + ( deviceGeneratedComputeCaptureReplay == rhs.deviceGeneratedComputeCaptureReplay ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + void * pNext 
= {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCompute = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + }; + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; @@ -61591,6 +62962,295 @@ namespace VULKAN_HPP_NAMESPACE }; using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + struct PhysicalDeviceHostImageCopyFeaturesEXT + { + using NativeType = VkPhysicalDeviceHostImageCopyFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , hostImageCopy( hostImageCopy_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeaturesEXT( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostImageCopyFeaturesEXT( VkPhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceHostImageCopyFeaturesEXT & operator=( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceHostImageCopyFeaturesEXT & operator=( VkPhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeaturesEXT & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT + { + hostImageCopy = hostImageCopy_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceHostImageCopyFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hostImageCopy ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostImageCopyFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostImageCopy == rhs.hostImageCopy ); +# endif + } + + bool operator!=( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT; + void * pNext = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostImageCopyFeaturesEXT; + }; + + struct PhysicalDeviceHostImageCopyPropertiesEXT + { + using NativeType = VkPhysicalDeviceHostImageCopyPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT( uint32_t copySrcLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, + uint32_t copyDstLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , copySrcLayoutCount( copySrcLayoutCount_ ) + , pCopySrcLayouts( pCopySrcLayouts_ ) + , copyDstLayoutCount( copyDstLayoutCount_ ) + , pCopyDstLayouts( pCopyDstLayouts_ ) + , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ ) + , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHostImageCopyPropertiesEXT( VkPhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) + : pNext( pNext_ ) + , copySrcLayoutCount( static_cast( copySrcLayouts_.size() ) ) + , pCopySrcLayouts( copySrcLayouts_.data() ) + , copyDstLayoutCount( static_cast( copyDstLayouts_.size() ) ) + , pCopyDstLayouts( copyDstLayouts_.data() ) + , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ ) + , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceHostImageCopyPropertiesEXT & operator=( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceHostImageCopyPropertiesEXT & operator=( VkPhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + copySrcLayoutCount = copySrcLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + setPCopySrcLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pCopySrcLayouts = pCopySrcLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
PhysicalDeviceHostImageCopyPropertiesEXT & + setCopySrcLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_ ) VULKAN_HPP_NOEXCEPT + { + copySrcLayoutCount = static_cast( copySrcLayouts_.size() ); + pCopySrcLayouts = copySrcLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) VULKAN_HPP_NOEXCEPT + { + copyDstLayoutCount = copyDstLayoutCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + setPCopyDstLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ ) VULKAN_HPP_NOEXCEPT + { + pCopyDstLayouts = pCopyDstLayouts_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceHostImageCopyPropertiesEXT & + setCopyDstLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ ) VULKAN_HPP_NOEXCEPT + { + copyDstLayoutCount = static_cast( copyDstLayouts_.size() ); + pCopyDstLayouts = copyDstLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + setOptimalTilingLayoutUUID( std::array optimalTilingLayoutUUID_ ) VULKAN_HPP_NOEXCEPT + { + optimalTilingLayoutUUID = optimalTilingLayoutUUID_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + setIdenticalMemoryTypeRequirements( VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ ) VULKAN_HPP_NOEXCEPT + { + identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceHostImageCopyPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHostImageCopyPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, copySrcLayoutCount, pCopySrcLayouts, copyDstLayoutCount, pCopyDstLayouts, optimalTilingLayoutUUID, identicalMemoryTypeRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHostImageCopyPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( copySrcLayoutCount == rhs.copySrcLayoutCount ) && + ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) && + ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements ); +# endif + } + + bool operator!=( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT; + void * pNext = {}; + uint32_t copySrcLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {}; + uint32_t copyDstLayoutCount = {}; + 
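
The copySrcLayoutCount / pCopySrcLayouts and copyDstLayoutCount / pCopyDstLayouts pairs of this structure follow the familiar two-call pattern: query the counts with null pointers, allocate, then query again to fill the arrays with the layouts that host copies may use. The following minimal sketch is an editorial illustration, not generated code, and assumes a vk::PhysicalDevice that supports VK_EXT_host_image_copy.

#include <vulkan/vulkan.hpp>
#include <vector>

// Sketch: enumerate the image layouts usable as source / destination of host copies.
void queryHostImageCopyLayouts( vk::PhysicalDevice physicalDevice )
{
  vk::PhysicalDeviceHostImageCopyPropertiesEXT hostCopyProps{};
  vk::PhysicalDeviceProperties2 props2{};
  props2.pNext = &hostCopyProps;

  // First call: pCopySrcLayouts / pCopyDstLayouts are null, so only the counts are written.
  physicalDevice.getProperties2( &props2 );

  std::vector<vk::ImageLayout> srcLayouts( hostCopyProps.copySrcLayoutCount );
  std::vector<vk::ImageLayout> dstLayouts( hostCopyProps.copyDstLayoutCount );
  hostCopyProps.pCopySrcLayouts = srcLayouts.data();
  hostCopyProps.pCopyDstLayouts = dstLayouts.data();

  // Second call: the arrays are filled with the supported layouts.
  physicalDevice.getProperties2( &props2 );
}
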
VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D optimalTilingLayoutUUID = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHostImageCopyPropertiesEXT; + }; + struct PhysicalDeviceHostQueryResetFeatures { using NativeType = VkPhysicalDeviceHostQueryResetFeatures; @@ -83667,6 +85327,117 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineFragmentShadingRateStateCreateInfoKHR; }; + struct PipelineIndirectDeviceAddressInfoNV + { + using NativeType = VkPipelineIndirectDeviceAddressInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineIndirectDeviceAddressInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineIndirectDeviceAddressInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , pipeline( pipeline_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineIndirectDeviceAddressInfoNV( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineIndirectDeviceAddressInfoNV( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineIndirectDeviceAddressInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + PipelineIndirectDeviceAddressInfoNV & operator=( PipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineIndirectDeviceAddressInfoNV & operator=( VkPipelineIndirectDeviceAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & + setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineIndirectDeviceAddressInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineIndirectDeviceAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineIndirectDeviceAddressInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBindPoint, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineIndirectDeviceAddressInfoNV const & ) const = default; +#else + bool operator==( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == 
rhs.pipeline ); +# endif + } + + bool operator!=( PipelineIndirectDeviceAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineIndirectDeviceAddressInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = PipelineIndirectDeviceAddressInfoNV; + }; + struct PipelineInfoKHR { using NativeType = VkPipelineInfoKHR; @@ -99024,6 +100795,88 @@ namespace VULKAN_HPP_NAMESPACE using Type = SubpassShadingPipelineCreateInfoHUAWEI; }; + struct SubresourceHostMemcpySizeEXT + { + using NativeType = VkSubresourceHostMemcpySizeEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceHostMemcpySizeEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySizeEXT( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySizeEXT( SubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + SubresourceHostMemcpySizeEXT( VkSubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : SubresourceHostMemcpySizeEXT( *reinterpret_cast( &rhs ) ) + { + } + + SubresourceHostMemcpySizeEXT & operator=( SubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + SubresourceHostMemcpySizeEXT & operator=( VkSubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkSubresourceHostMemcpySizeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubresourceHostMemcpySizeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( SubresourceHostMemcpySizeEXT const & ) const = default; +#else + bool operator==( SubresourceHostMemcpySizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ); +# endif + } + + bool operator!=( SubresourceHostMemcpySizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceHostMemcpySizeEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = SubresourceHostMemcpySizeEXT; + }; + struct SubresourceLayout2EXT { using NativeType = VkSubresourceLayout2EXT; diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp index a0a1431..7e3a8f5 100644 --- a/include/vulkan/vulkan_to_string.hpp +++ b/include/vulkan/vulkan_to_string.hpp @@ -178,6 +178,8 @@ namespace VULKAN_HPP_NAMESPACE result += "FragmentDensityMapEXT | "; if ( value & 
ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | "; + if ( value & ImageUsageFlagBits::eHostTransferEXT ) + result += "HostTransferEXT | "; #if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR ) result += "VideoEncodeDstKHR | "; @@ -952,6 +954,8 @@ namespace VULKAN_HPP_NAMESPACE result += "DescriptorBufferEXT | "; if ( value & DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT ) result += "EmbeddedImmutableSamplersEXT | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV ) + result += "IndirectBindableNV | "; if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT ) result += "HostOnlyPoolEXT | "; @@ -1801,6 +1805,8 @@ namespace VULKAN_HPP_NAMESPACE result += "FragmentDensityMapEXT | "; if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | "; + if ( value & FormatFeatureFlagBits2::eHostImageTransferEXT ) + result += "HostImageTransferEXT | "; #if defined( VK_ENABLE_BETA_EXTENSIONS ) if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR ) result += "VideoEncodeInputKHR | "; @@ -2775,6 +2781,20 @@ namespace VULKAN_HPP_NAMESPACE return "{}"; } + //=== VK_EXT_host_image_copy === + + VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & HostImageCopyFlagBitsEXT::eMemcpy ) + result += "Memcpy | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + //=== VK_KHR_map_memory2 === VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagsKHR ) @@ -3970,6 +3990,16 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR"; case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR"; case StructureType::ePipelineExecutableInternalRepresentationKHR: return "PipelineExecutableInternalRepresentationKHR"; + case StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT: return "PhysicalDeviceHostImageCopyFeaturesEXT"; + case StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT: return "PhysicalDeviceHostImageCopyPropertiesEXT"; + case StructureType::eMemoryToImageCopyEXT: return "MemoryToImageCopyEXT"; + case StructureType::eImageToMemoryCopyEXT: return "ImageToMemoryCopyEXT"; + case StructureType::eCopyImageToMemoryInfoEXT: return "CopyImageToMemoryInfoEXT"; + case StructureType::eCopyMemoryToImageInfoEXT: return "CopyMemoryToImageInfoEXT"; + case StructureType::eHostImageLayoutTransitionInfoEXT: return "HostImageLayoutTransitionInfoEXT"; + case StructureType::eCopyImageToImageInfoEXT: return "CopyImageToImageInfoEXT"; + case StructureType::eSubresourceHostMemcpySizeEXT: return "SubresourceHostMemcpySizeEXT"; + case StructureType::eHostImageCopyDevicePerformanceQueryEXT: return "HostImageCopyDevicePerformanceQueryEXT"; case StructureType::eMemoryMapInfoKHR: return "MemoryMapInfoKHR"; case StructureType::eMemoryUnmapInfoKHR: return "MemoryUnmapInfoKHR"; case StructureType::ePhysicalDeviceShaderAtomicFloat2FeaturesEXT: return "PhysicalDeviceShaderAtomicFloat2FeaturesEXT"; @@ -4178,6 +4208,9 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesNV: return "PhysicalDeviceCopyMemoryIndirectPropertiesNV"; case StructureType::ePhysicalDeviceMemoryDecompressionFeaturesNV: return "PhysicalDeviceMemoryDecompressionFeaturesNV"; case 
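// ---------------------------------------------------------------------------
// Editor's note: the hunks above add the VK_EXT_host_image_copy flag bits and
// structure-type names to the to_string helpers.  The sketch below is an
// illustration only (it is not part of the generated headers or of this
// patch): it shows how the new structures are typically combined to upload
// pixels without a staging buffer.  `device`, `image`, `pixels` and `extent`
// are assumed to be created elsewhere, the hostImageCopy feature is assumed to
// be enabled, and the image is assumed to have been created with
// VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT.
// ---------------------------------------------------------------------------
#include <vulkan/vulkan_core.h>

static void uploadWithHostImageCopy( VkDevice device, VkImage image, const void * pixels, VkExtent3D extent )
{
  // The extension's commands are device-level, so fetch them explicitly.
  auto pfnTransition =
    reinterpret_cast<PFN_vkTransitionImageLayoutEXT>( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) );
  auto pfnCopy = reinterpret_cast<PFN_vkCopyMemoryToImageEXT>( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) );

  // Transition the image on the host, without recording a command buffer.
  VkHostImageLayoutTransitionInfoEXT transition{};
  transition.sType            = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT;
  transition.image            = image;
  transition.oldLayout        = VK_IMAGE_LAYOUT_UNDEFINED;
  transition.newLayout        = VK_IMAGE_LAYOUT_GENERAL;
  transition.subresourceRange = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };
  pfnTransition( device, 1, &transition );

  // Copy host memory straight into the image; row length 0 means tightly packed.
  VkMemoryToImageCopyEXT region{};
  region.sType            = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT;
  region.pHostPointer     = pixels;
  region.imageSubresource = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1 };
  region.imageExtent      = extent;

  VkCopyMemoryToImageInfoEXT copyInfo{};
  copyInfo.sType          = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT;
  copyInfo.dstImage       = image;
  copyInfo.dstImageLayout = VK_IMAGE_LAYOUT_GENERAL;
  copyInfo.regionCount    = 1;
  copyInfo.pRegions       = &region;
  pfnCopy( device, &copyInfo );
}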
StructureType::ePhysicalDeviceMemoryDecompressionPropertiesNV: return "PhysicalDeviceMemoryDecompressionPropertiesNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV: return "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV"; + case StructureType::eComputePipelineIndirectBufferInfoNV: return "ComputePipelineIndirectBufferInfoNV"; + case StructureType::ePipelineIndirectDeviceAddressInfoNV: return "PipelineIndirectDeviceAddressInfoNV"; case StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV: return "PhysicalDeviceLinearColorAttachmentFeaturesNV"; case StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT: return "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT"; case StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM: return "PhysicalDeviceImageProcessingFeaturesQCOM"; @@ -4687,6 +4720,7 @@ namespace VULKAN_HPP_NAMESPACE case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; + case ImageUsageFlagBits::eHostTransferEXT: return "HostTransferEXT"; #if defined( VK_ENABLE_BETA_EXTENSIONS ) case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; @@ -5618,6 +5652,7 @@ namespace VULKAN_HPP_NAMESPACE case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; case DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT"; case DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT: return "EmbeddedImmutableSamplersEXT"; + case DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT: return "HostOnlyPoolEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } @@ -6398,6 +6433,7 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; case FormatFeatureFlagBits2::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; + case FormatFeatureFlagBits2::eHostImageTransferEXT: return "HostImageTransferEXT"; #if defined( VK_ENABLE_BETA_EXTENSIONS ) case FormatFeatureFlagBits2::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; case FormatFeatureFlagBits2::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; @@ -7780,6 +7816,17 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_EXT_host_image_copy === + + VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlagBitsEXT value ) + { + switch ( value ) + { + case HostImageCopyFlagBitsEXT::eMemcpy: return "Memcpy"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_KHR_map_memory2 === VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBitsKHR ) @@ -7835,6 +7882,8 @@ namespace VULKAN_HPP_NAMESPACE case IndirectCommandsTokenTypeNV::eDraw: return "Draw"; case IndirectCommandsTokenTypeNV::eDrawTasks: return "DrawTasks"; case IndirectCommandsTokenTypeNV::eDrawMeshTasks: return "DrawMeshTasks"; + case IndirectCommandsTokenTypeNV::ePipeline: return "Pipeline"; + case IndirectCommandsTokenTypeNV::eDispatch: return "Dispatch"; default: return 
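// ---------------------------------------------------------------------------
// Editor's note: the new StructureType and IndirectCommandsTokenTypeNV cases
// above come from VK_NV_device_generated_commands_compute.  The sketch below
// is an illustration only (not part of the generated headers or of this
// patch): it queries the indirect device address of a compute pipeline, the
// value consumed by a VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV token when
// compute dispatches are generated on the device.  `device` and
// `computePipeline` are assumed to exist, and the pipeline is assumed to have
// been created as indirect-bindable (VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV).
// ---------------------------------------------------------------------------
#include <vulkan/vulkan_core.h>

static VkDeviceAddress queryComputePipelineIndirectAddress( VkDevice device, VkPipeline computePipeline )
{
  auto pfnGetAddress = reinterpret_cast<PFN_vkGetPipelineIndirectDeviceAddressNV>(
    vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) );

  // Fill the structure whose C++ wrapper is added earlier in this patch.
  VkPipelineIndirectDeviceAddressInfoNV info{};
  info.sType             = VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV;
  info.pipelineBindPoint = VK_PIPELINE_BIND_POINT_COMPUTE;
  info.pipeline          = computePipeline;

  return pfnGetAddress( device, &info );
}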
"invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } diff --git a/include/vulkan/vulkan_video.hpp b/include/vulkan/vulkan_video.hpp new file mode 100644 index 0000000..7465938 --- /dev/null +++ b/include/vulkan/vulkan_video.hpp @@ -0,0 +1,2696 @@ +// Copyright 2021-2023 The Khronos Group Inc. +// SPDX-License-Identifier: Apache-2.0 OR MIT +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#include +#include +#include +#include +#include +#include +#include +#include + +#if !defined( VULKAN_HPP_VIDEO_NAMESPACE ) +# define VULKAN_HPP_VIDEO_NAMESPACE video +#endif + +namespace VULKAN_HPP_NAMESPACE +{ + namespace VULKAN_HPP_VIDEO_NAMESPACE + { + + //============= + //=== ENUMs === + //============= + + //=== vulkan_video_codec_h264std === + + enum class H264ChromaFormatIdc + { + eMonochrome = STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME, + e420 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_420, + e422 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_422, + e444 = STD_VIDEO_H264_CHROMA_FORMAT_IDC_444, + eInvalid = STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID + }; + + enum class H264ProfileIdc + { + eBaseline = STD_VIDEO_H264_PROFILE_IDC_BASELINE, + eMain = STD_VIDEO_H264_PROFILE_IDC_MAIN, + eHigh = STD_VIDEO_H264_PROFILE_IDC_HIGH, + eHigh444Predictive = STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE, + eInvalid = STD_VIDEO_H264_PROFILE_IDC_INVALID + }; + + enum class H264LevelIdc + { + e1_0 = STD_VIDEO_H264_LEVEL_IDC_1_0, + e1_1 = STD_VIDEO_H264_LEVEL_IDC_1_1, + e1_2 = STD_VIDEO_H264_LEVEL_IDC_1_2, + e1_3 = STD_VIDEO_H264_LEVEL_IDC_1_3, + e2_0 = STD_VIDEO_H264_LEVEL_IDC_2_0, + e2_1 = STD_VIDEO_H264_LEVEL_IDC_2_1, + e2_2 = STD_VIDEO_H264_LEVEL_IDC_2_2, + e3_0 = STD_VIDEO_H264_LEVEL_IDC_3_0, + e3_1 = STD_VIDEO_H264_LEVEL_IDC_3_1, + e3_2 = STD_VIDEO_H264_LEVEL_IDC_3_2, + e4_0 = STD_VIDEO_H264_LEVEL_IDC_4_0, + e4_1 = STD_VIDEO_H264_LEVEL_IDC_4_1, + e4_2 = STD_VIDEO_H264_LEVEL_IDC_4_2, + e5_0 = STD_VIDEO_H264_LEVEL_IDC_5_0, + e5_1 = STD_VIDEO_H264_LEVEL_IDC_5_1, + e5_2 = STD_VIDEO_H264_LEVEL_IDC_5_2, + e6_0 = STD_VIDEO_H264_LEVEL_IDC_6_0, + e6_1 = STD_VIDEO_H264_LEVEL_IDC_6_1, + e6_2 = STD_VIDEO_H264_LEVEL_IDC_6_2, + eInvalid = STD_VIDEO_H264_LEVEL_IDC_INVALID + }; + + enum class H264PocType + { + e0 = STD_VIDEO_H264_POC_TYPE_0, + e1 = STD_VIDEO_H264_POC_TYPE_1, + e2 = STD_VIDEO_H264_POC_TYPE_2, + eInvalid = STD_VIDEO_H264_POC_TYPE_INVALID + }; + + enum class H264AspectRatioIdc + { + eUnspecified = STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED, + eSquare = STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE, + e12_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11, + e10_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11, + e16_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11, + e40_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33, + e24_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11, + e20_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11, + e32_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11, + e80_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33, + e18_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11, + e15_11 = STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11, + e64_33 = STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33, + e160_99 = STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99, + e4_3 = STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3, + e3_2 = STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2, + e2_1 = STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1, + eExtendedSar = STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR, + eInvalid = STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID + }; + + enum class H264WeightedBipredIdc + { + eDefault = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT, + eExplicit = 
STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT, + eImplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT, + eInvalid = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID + }; + + enum class H264ModificationOfPicNumsIdc + { + eShortTermSubtract = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT, + eShortTermAdd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD, + eLongTerm = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM, + eEnd = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END, + eInvalid = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID + }; + + enum class H264MemMgmtControlOp + { + eEnd = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END, + eUnmarkShortTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM, + eUnmarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM, + eMarkLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM, + eSetMaxLongTermIndex = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX, + eUnmarkAll = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL, + eMarkCurrentAsLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM, + eInvalid = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID + }; + + enum class H264CabacInitIdc + { + e0 = STD_VIDEO_H264_CABAC_INIT_IDC_0, + e1 = STD_VIDEO_H264_CABAC_INIT_IDC_1, + e2 = STD_VIDEO_H264_CABAC_INIT_IDC_2, + eInvalid = STD_VIDEO_H264_CABAC_INIT_IDC_INVALID + }; + + enum class H264DisableDeblockingFilterIdc + { + eDisabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED, + eEnabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED, + ePartial = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL, + eInvalid = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID + }; + + enum class H264SliceType + { + eP = STD_VIDEO_H264_SLICE_TYPE_P, + eB = STD_VIDEO_H264_SLICE_TYPE_B, + eI = STD_VIDEO_H264_SLICE_TYPE_I, + eInvalid = STD_VIDEO_H264_SLICE_TYPE_INVALID + }; + + enum class H264PictureType + { + eP = STD_VIDEO_H264_PICTURE_TYPE_P, + eB = STD_VIDEO_H264_PICTURE_TYPE_B, + eI = STD_VIDEO_H264_PICTURE_TYPE_I, + eIdr = STD_VIDEO_H264_PICTURE_TYPE_IDR, + eInvalid = STD_VIDEO_H264_PICTURE_TYPE_INVALID + }; + + enum class H264NonVclNaluType + { + eSps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS, + ePps = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS, + eAud = STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD, + ePrefix = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX, + eEndOfSequence = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE, + eEndOfStream = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM, + ePrecoded = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED, + eInvalid = STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID + }; + + //=== vulkan_video_codec_h264std_decode === + + enum class DecodeH264FieldOrderCount + { + eTop = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP, + eBottom = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM, + eInvalid = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID + }; + + //=== vulkan_video_codec_h265std === + + enum class H265ChromaFormatIdc + { + eMonochrome = STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME, + e420 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_420, + e422 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_422, + e444 = STD_VIDEO_H265_CHROMA_FORMAT_IDC_444, + eInvalid = STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID + }; + + enum class H265ProfileIdc + { + eMain = STD_VIDEO_H265_PROFILE_IDC_MAIN, + eMain10 = STD_VIDEO_H265_PROFILE_IDC_MAIN_10, + eMainStillPicture = STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE, + eFormatRangeExtensions = STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS, + eSccExtensions = 
STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS, + eInvalid = STD_VIDEO_H265_PROFILE_IDC_INVALID + }; + + enum class H265LevelIdc + { + e1_0 = STD_VIDEO_H265_LEVEL_IDC_1_0, + e2_0 = STD_VIDEO_H265_LEVEL_IDC_2_0, + e2_1 = STD_VIDEO_H265_LEVEL_IDC_2_1, + e3_0 = STD_VIDEO_H265_LEVEL_IDC_3_0, + e3_1 = STD_VIDEO_H265_LEVEL_IDC_3_1, + e4_0 = STD_VIDEO_H265_LEVEL_IDC_4_0, + e4_1 = STD_VIDEO_H265_LEVEL_IDC_4_1, + e5_0 = STD_VIDEO_H265_LEVEL_IDC_5_0, + e5_1 = STD_VIDEO_H265_LEVEL_IDC_5_1, + e5_2 = STD_VIDEO_H265_LEVEL_IDC_5_2, + e6_0 = STD_VIDEO_H265_LEVEL_IDC_6_0, + e6_1 = STD_VIDEO_H265_LEVEL_IDC_6_1, + e6_2 = STD_VIDEO_H265_LEVEL_IDC_6_2, + eInvalid = STD_VIDEO_H265_LEVEL_IDC_INVALID + }; + + enum class H265SliceType + { + eB = STD_VIDEO_H265_SLICE_TYPE_B, + eP = STD_VIDEO_H265_SLICE_TYPE_P, + eI = STD_VIDEO_H265_SLICE_TYPE_I, + eInvalid = STD_VIDEO_H265_SLICE_TYPE_INVALID + }; + + enum class H265PictureType + { + eP = STD_VIDEO_H265_PICTURE_TYPE_P, + eB = STD_VIDEO_H265_PICTURE_TYPE_B, + eI = STD_VIDEO_H265_PICTURE_TYPE_I, + eIdr = STD_VIDEO_H265_PICTURE_TYPE_IDR, + eInvalid = STD_VIDEO_H265_PICTURE_TYPE_INVALID + }; + + enum class H265AspectRatioIdc + { + eUnspecified = STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED, + eSquare = STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE, + e12_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11, + e10_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11, + e16_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11, + e40_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33, + e24_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11, + e20_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11, + e32_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11, + e80_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33, + e18_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11, + e15_11 = STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11, + e64_33 = STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33, + e160_99 = STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99, + e4_3 = STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3, + e3_2 = STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2, + e2_1 = STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1, + eExtendedSar = STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR, + eInvalid = STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID + }; + + //=============== + //=== STRUCTS === + //=============== + + //=== vulkan_video_codec_h264std === + + struct H264SpsVuiFlags + { + using NativeType = StdVideoH264SpsVuiFlags; + + operator StdVideoH264SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SpsVuiFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && + ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && + ( video_full_range_flag == rhs.video_full_range_flag ) && ( color_description_present_flag == rhs.color_description_present_flag ) && + ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) && + ( fixed_frame_rate_flag == rhs.fixed_frame_rate_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && + ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) && + ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ); + } + + bool operator!=( H264SpsVuiFlags const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t color_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t timing_info_present_flag : 1; + uint32_t fixed_frame_rate_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + }; + + struct H264HrdParameters + { + using NativeType = StdVideoH264HrdParameters; + + operator StdVideoH264HrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264HrdParameters &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && ( cpb_size_scale == rhs.cpb_size_scale ) && + ( reserved1 == rhs.reserved1 ) && ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && + ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && ( cbr_flag == rhs.cbr_flag ) && + ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) && + ( cpb_removal_delay_length_minus1 == rhs.cpb_removal_delay_length_minus1 ) && + ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( time_offset_length == rhs.time_offset_length ); + } + + bool operator!=( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t cpb_cnt_minus1 = {}; + uint8_t bit_rate_scale = {}; + uint8_t cpb_size_scale = {}; + uint8_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D bit_rate_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_size_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cbr_flag = {}; + uint32_t initial_cpb_removal_delay_length_minus1 = {}; + uint32_t cpb_removal_delay_length_minus1 = {}; + uint32_t dpb_output_delay_length_minus1 = {}; + uint32_t time_offset_length = {}; + }; + + struct H264SequenceParameterSetVui + { + using NativeType = StdVideoH264SequenceParameterSetVui; + + operator StdVideoH264SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) && + ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coefficients == rhs.matrix_coefficients ) && + ( num_units_in_tick == rhs.num_units_in_tick ) && ( time_scale == rhs.time_scale ) && ( max_num_reorder_frames == rhs.max_num_reorder_frames ) && + ( max_dec_frame_buffering == rhs.max_dec_frame_buffering ) && ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && + ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) && + ( pHrdParameters == rhs.pHrdParameters ); + } + + bool operator!=( 
H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsVuiFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc aspect_ratio_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc::eUnspecified; + uint16_t sar_width = {}; + uint16_t sar_height = {}; + uint8_t video_format = {}; + uint8_t colour_primaries = {}; + uint8_t transfer_characteristics = {}; + uint8_t matrix_coefficients = {}; + uint32_t num_units_in_tick = {}; + uint32_t time_scale = {}; + uint8_t max_num_reorder_frames = {}; + uint8_t max_dec_frame_buffering = {}; + uint8_t chroma_sample_loc_type_top_field = {}; + uint8_t chroma_sample_loc_type_bottom_field = {}; + uint32_t reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264HrdParameters * pHrdParameters = {}; + }; + + struct H264SpsFlags + { + using NativeType = StdVideoH264SpsFlags; + + operator StdVideoH264SpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( constraint_set0_flag == rhs.constraint_set0_flag ) && ( constraint_set1_flag == rhs.constraint_set1_flag ) && + ( constraint_set2_flag == rhs.constraint_set2_flag ) && ( constraint_set3_flag == rhs.constraint_set3_flag ) && + ( constraint_set4_flag == rhs.constraint_set4_flag ) && ( constraint_set5_flag == rhs.constraint_set5_flag ) && + ( direct_8x8_inference_flag == rhs.direct_8x8_inference_flag ) && ( mb_adaptive_frame_field_flag == rhs.mb_adaptive_frame_field_flag ) && + ( frame_mbs_only_flag == rhs.frame_mbs_only_flag ) && ( delta_pic_order_always_zero_flag == rhs.delta_pic_order_always_zero_flag ) && + ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && + ( gaps_in_frame_num_value_allowed_flag == rhs.gaps_in_frame_num_value_allowed_flag ) && + ( qpprime_y_zero_transform_bypass_flag == rhs.qpprime_y_zero_transform_bypass_flag ) && ( frame_cropping_flag == rhs.frame_cropping_flag ) && + ( seq_scaling_matrix_present_flag == rhs.seq_scaling_matrix_present_flag ) && ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ); + } + + bool operator!=( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t constraint_set0_flag : 1; + uint32_t constraint_set1_flag : 1; + uint32_t constraint_set2_flag : 1; + uint32_t constraint_set3_flag : 1; + uint32_t constraint_set4_flag : 1; + uint32_t constraint_set5_flag : 1; + uint32_t direct_8x8_inference_flag : 1; + uint32_t mb_adaptive_frame_field_flag : 1; + uint32_t frame_mbs_only_flag : 1; + uint32_t delta_pic_order_always_zero_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t gaps_in_frame_num_value_allowed_flag : 1; + uint32_t qpprime_y_zero_transform_bypass_flag : 1; + uint32_t frame_cropping_flag : 1; + uint32_t seq_scaling_matrix_present_flag : 1; + uint32_t vui_parameters_present_flag : 1; + }; + + struct H264ScalingLists + { + using NativeType = StdVideoH264ScalingLists; + + operator StdVideoH264ScalingLists const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264ScalingLists &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264ScalingLists const & rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return ( scaling_list_present_mask == rhs.scaling_list_present_mask ) && ( use_default_scaling_matrix_mask == rhs.use_default_scaling_matrix_mask ) && + ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ); + } + + bool operator!=( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint16_t scaling_list_present_mask = {}; + uint16_t use_default_scaling_matrix_mask = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList4x4 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList8x8 = {}; + }; + + struct H264SequenceParameterSet + { + using NativeType = StdVideoH264SequenceParameterSet; + + operator StdVideoH264SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264SequenceParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( profile_idc == rhs.profile_idc ) && ( level_idc == rhs.level_idc ) && + ( chroma_format_idc == rhs.chroma_format_idc ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && + ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && + ( log2_max_frame_num_minus4 == rhs.log2_max_frame_num_minus4 ) && ( pic_order_cnt_type == rhs.pic_order_cnt_type ) && + ( offset_for_non_ref_pic == rhs.offset_for_non_ref_pic ) && ( offset_for_top_to_bottom_field == rhs.offset_for_top_to_bottom_field ) && + ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) && + ( num_ref_frames_in_pic_order_cnt_cycle == rhs.num_ref_frames_in_pic_order_cnt_cycle ) && ( max_num_ref_frames == rhs.max_num_ref_frames ) && + ( reserved1 == rhs.reserved1 ) && ( pic_width_in_mbs_minus1 == rhs.pic_width_in_mbs_minus1 ) && + ( pic_height_in_map_units_minus1 == rhs.pic_height_in_map_units_minus1 ) && ( frame_crop_left_offset == rhs.frame_crop_left_offset ) && + ( frame_crop_right_offset == rhs.frame_crop_right_offset ) && ( frame_crop_top_offset == rhs.frame_crop_top_offset ) && + ( frame_crop_bottom_offset == rhs.frame_crop_bottom_offset ) && ( reserved2 == rhs.reserved2 ) && + ( pOffsetForRefFrame == rhs.pOffsetForRefFrame ) && ( pScalingLists == rhs.pScalingLists ) && + ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ); + } + + bool operator!=( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc profile_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc::eBaseline; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc::e1_0; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc chroma_format_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc::eMonochrome; + uint8_t seq_parameter_set_id = {}; + uint8_t bit_depth_luma_minus8 = {}; + uint8_t bit_depth_chroma_minus8 = {}; + uint8_t log2_max_frame_num_minus4 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType pic_order_cnt_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType::e0; + int32_t offset_for_non_ref_pic = {}; + int32_t 
offset_for_top_to_bottom_field = {}; + uint8_t log2_max_pic_order_cnt_lsb_minus4 = {}; + uint8_t num_ref_frames_in_pic_order_cnt_cycle = {}; + uint8_t max_num_ref_frames = {}; + uint8_t reserved1 = {}; + uint32_t pic_width_in_mbs_minus1 = {}; + uint32_t pic_height_in_map_units_minus1 = {}; + uint32_t frame_crop_left_offset = {}; + uint32_t frame_crop_right_offset = {}; + uint32_t frame_crop_top_offset = {}; + uint32_t frame_crop_bottom_offset = {}; + uint32_t reserved2 = {}; + const int32_t * pOffsetForRefFrame = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SequenceParameterSetVui * pSequenceParameterSetVui = {}; + }; + + struct H264PpsFlags + { + using NativeType = StdVideoH264PpsFlags; + + operator StdVideoH264PpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264PpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( transform_8x8_mode_flag == rhs.transform_8x8_mode_flag ) && ( redundant_pic_cnt_present_flag == rhs.redundant_pic_cnt_present_flag ) && + ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && + ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && ( weighted_pred_flag == rhs.weighted_pred_flag ) && + ( bottom_field_pic_order_in_frame_present_flag == rhs.bottom_field_pic_order_in_frame_present_flag ) && + ( entropy_coding_mode_flag == rhs.entropy_coding_mode_flag ) && ( pic_scaling_matrix_present_flag == rhs.pic_scaling_matrix_present_flag ); + } + + bool operator!=( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t transform_8x8_mode_flag : 1; + uint32_t redundant_pic_cnt_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t bottom_field_pic_order_in_frame_present_flag : 1; + uint32_t entropy_coding_mode_flag : 1; + uint32_t pic_scaling_matrix_present_flag : 1; + }; + + struct H264PictureParameterSet + { + using NativeType = StdVideoH264PictureParameterSet; + + operator StdVideoH264PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH264PictureParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) && + ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( weighted_bipred_idc == rhs.weighted_bipred_idc ) && + ( pic_init_qp_minus26 == rhs.pic_init_qp_minus26 ) && ( pic_init_qs_minus26 == rhs.pic_init_qs_minus26 ) && + ( chroma_qp_index_offset == rhs.chroma_qp_index_offset ) && ( second_chroma_qp_index_offset == rhs.second_chroma_qp_index_offset ) && + ( pScalingLists == rhs.pScalingLists ); + } + + bool operator!=( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PpsFlags flags = {}; + uint8_t 
seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint8_t num_ref_idx_l0_default_active_minus1 = {}; + uint8_t num_ref_idx_l1_default_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc weighted_bipred_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc::eDefault; + int8_t pic_init_qp_minus26 = {}; + int8_t pic_init_qs_minus26 = {}; + int8_t chroma_qp_index_offset = {}; + int8_t second_chroma_qp_index_offset = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists = {}; + }; + + //=== vulkan_video_codec_h264std_decode === + + struct DecodeH264PictureInfoFlags + { + using NativeType = StdVideoDecodeH264PictureInfoFlags; + + operator StdVideoDecodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( field_pic_flag == rhs.field_pic_flag ) && ( is_intra == rhs.is_intra ) && ( IdrPicFlag == rhs.IdrPicFlag ) && + ( bottom_field_flag == rhs.bottom_field_flag ) && ( is_reference == rhs.is_reference ) && + ( complementary_field_pair == rhs.complementary_field_pair ); + } + + bool operator!=( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t field_pic_flag : 1; + uint32_t is_intra : 1; + uint32_t IdrPicFlag : 1; + uint32_t bottom_field_flag : 1; + uint32_t is_reference : 1; + uint32_t complementary_field_pair : 1; + }; + + struct DecodeH264PictureInfo + { + using NativeType = StdVideoDecodeH264PictureInfo; + + operator StdVideoDecodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( frame_num == rhs.frame_num ) && ( idr_pic_id == rhs.idr_pic_id ) && + ( PicOrderCnt == rhs.PicOrderCnt ); + } + + bool operator!=( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264PictureInfoFlags flags = {}; + uint8_t seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint16_t frame_num = {}; + uint16_t idr_pic_id = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D PicOrderCnt = {}; + }; + + struct DecodeH264ReferenceInfoFlags + { + using NativeType = StdVideoDecodeH264ReferenceInfoFlags; + + operator StdVideoDecodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( top_field_flag == rhs.top_field_flag ) && ( bottom_field_flag == rhs.bottom_field_flag ) && + ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( is_non_existing == 
rhs.is_non_existing ); + } + + bool operator!=( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t top_field_flag : 1; + uint32_t bottom_field_flag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t is_non_existing : 1; + }; + + struct DecodeH264ReferenceInfo + { + using NativeType = StdVideoDecodeH264ReferenceInfo; + + operator StdVideoDecodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( FrameNum == rhs.FrameNum ) && ( reserved == rhs.reserved ) && ( PicOrderCnt == rhs.PicOrderCnt ); + } + + bool operator!=( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264ReferenceInfoFlags flags = {}; + uint16_t FrameNum = {}; + uint16_t reserved = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D PicOrderCnt = {}; + }; + + //=== vulkan_video_codec_h264std_encode === + + struct EncodeH264WeightTableFlags + { + using NativeType = StdVideoEncodeH264WeightTableFlags; + + operator StdVideoEncodeH264WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTableFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && + ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); + } + + bool operator!=( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t luma_weight_l0_flag = {}; + uint32_t chroma_weight_l0_flag = {}; + uint32_t luma_weight_l1_flag = {}; + uint32_t chroma_weight_l1_flag = {}; + }; + + struct EncodeH264WeightTable + { + using NativeType = StdVideoEncodeH264WeightTable; + + operator StdVideoEncodeH264WeightTable const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264WeightTable &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && + ( chroma_log2_weight_denom == rhs.chroma_log2_weight_denom ) && ( luma_weight_l0 == rhs.luma_weight_l0 ) && + ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( chroma_weight_l0 == rhs.chroma_weight_l0 ) && ( chroma_offset_l0 == rhs.chroma_offset_l0 ) && + ( luma_weight_l1 == rhs.luma_weight_l1 ) && ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( chroma_weight_l1 == rhs.chroma_weight_l1 ) && + ( chroma_offset_l1 == rhs.chroma_offset_l1 ); + } + + bool operator!=( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTableFlags flags = {}; + uint8_t luma_log2_weight_denom = {}; + uint8_t chroma_log2_weight_denom = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_weight_l0 = {}; + 
VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D chroma_offset_l1 = {}; + }; + + struct EncodeH264SliceHeaderFlags + { + using NativeType = StdVideoEncodeH264SliceHeaderFlags; + + operator StdVideoEncodeH264SliceHeaderFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeaderFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( direct_spatial_mv_pred_flag == rhs.direct_spatial_mv_pred_flag ) && + ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t direct_spatial_mv_pred_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t reserved : 30; + }; + + struct EncodeH264PictureInfoFlags + { + using NativeType = StdVideoEncodeH264PictureInfoFlags; + + operator StdVideoEncodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( IdrPicFlag == rhs.IdrPicFlag ) && ( is_reference == rhs.is_reference ) && + ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && ( long_term_reference_flag == rhs.long_term_reference_flag ) && + ( adaptive_ref_pic_marking_mode_flag == rhs.adaptive_ref_pic_marking_mode_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t IdrPicFlag : 1; + uint32_t is_reference : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t long_term_reference_flag : 1; + uint32_t adaptive_ref_pic_marking_mode_flag : 1; + uint32_t reserved : 27; + }; + + struct EncodeH264ReferenceInfoFlags + { + using NativeType = StdVideoEncodeH264ReferenceInfoFlags; + + operator StdVideoEncodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t reserved : 31; + }; + + struct EncodeH264ReferenceListsInfoFlags + { + using NativeType = StdVideoEncodeH264ReferenceListsInfoFlags; + + operator StdVideoEncodeH264ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfoFlags &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) && + ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; + }; + + struct EncodeH264RefListModEntry + { + using NativeType = StdVideoEncodeH264RefListModEntry; + + operator StdVideoEncodeH264RefListModEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264RefListModEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( modification_of_pic_nums_idc == rhs.modification_of_pic_nums_idc ) && ( abs_diff_pic_num_minus1 == rhs.abs_diff_pic_num_minus1 ) && + ( long_term_pic_num == rhs.long_term_pic_num ); + } + + bool operator!=( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc modification_of_pic_nums_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc::eShortTermSubtract; + uint16_t abs_diff_pic_num_minus1 = {}; + uint16_t long_term_pic_num = {}; + }; + + struct EncodeH264RefPicMarkingEntry + { + using NativeType = StdVideoEncodeH264RefPicMarkingEntry; + + operator StdVideoEncodeH264RefPicMarkingEntry const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264RefPicMarkingEntry &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( operation == rhs.operation ) && ( difference_of_pic_nums_minus1 == rhs.difference_of_pic_nums_minus1 ) && + ( long_term_pic_num == rhs.long_term_pic_num ) && ( long_term_frame_idx == rhs.long_term_frame_idx ) && + ( max_long_term_frame_idx_plus1 == rhs.max_long_term_frame_idx_plus1 ); + } + + bool operator!=( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp operation = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp::eEnd; + uint16_t difference_of_pic_nums_minus1 = {}; + uint16_t long_term_pic_num = {}; + uint16_t long_term_frame_idx = {}; + uint16_t max_long_term_frame_idx_plus1 = {}; + }; + + struct EncodeH264ReferenceListsInfo + { + using NativeType = StdVideoEncodeH264ReferenceListsInfo; + + operator StdVideoEncodeH264ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) && + ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( 
RefPicList0 == rhs.RefPicList0 ) && + ( RefPicList1 == rhs.RefPicList1 ) && ( refList0ModOpCount == rhs.refList0ModOpCount ) && ( refList1ModOpCount == rhs.refList1ModOpCount ) && + ( refPicMarkingOpCount == rhs.refPicMarkingOpCount ) && ( reserved1 == rhs.reserved1 ) && + ( pRefList0ModOperations == rhs.pRefList0ModOperations ) && ( pRefList1ModOperations == rhs.pRefList1ModOperations ) && + ( pRefPicMarkingOperations == rhs.pRefPicMarkingOperations ); + } + + bool operator!=( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfoFlags flags = {}; + uint8_t num_ref_idx_l0_active_minus1 = {}; + uint8_t num_ref_idx_l1_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList1 = {}; + uint8_t refList0ModOpCount = {}; + uint8_t refList1ModOpCount = {}; + uint8_t refPicMarkingOpCount = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList0ModOperations = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry * pRefList1ModOperations = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefPicMarkingEntry * pRefPicMarkingOperations = {}; + }; + + struct EncodeH264PictureInfo + { + using NativeType = StdVideoEncodeH264PictureInfo; + + operator StdVideoEncodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) && + ( idr_pic_id == rhs.idr_pic_id ) && ( primary_pic_type == rhs.primary_pic_type ) && ( frame_num == rhs.frame_num ) && + ( PicOrderCnt == rhs.PicOrderCnt ) && ( temporal_id == rhs.temporal_id ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ); + } + + bool operator!=( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264PictureInfoFlags flags = {}; + uint8_t seq_parameter_set_id = {}; + uint8_t pic_parameter_set_id = {}; + uint16_t idr_pic_id = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP; + uint32_t frame_num = {}; + int32_t PicOrderCnt = {}; + uint8_t temporal_id = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfo * pRefLists = {}; + }; + + struct EncodeH264ReferenceInfo + { + using NativeType = StdVideoEncodeH264ReferenceInfo; + + operator StdVideoEncodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( primary_pic_type == rhs.primary_pic_type ) && ( FrameNum == rhs.FrameNum ) && ( PicOrderCnt == rhs.PicOrderCnt ) && + ( 
long_term_pic_num == rhs.long_term_pic_num ) && ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( temporal_id == rhs.temporal_id ); + } + + bool operator!=( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType primary_pic_type = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP; + uint32_t FrameNum = {}; + int32_t PicOrderCnt = {}; + uint16_t long_term_pic_num = {}; + uint16_t long_term_frame_idx = {}; + uint8_t temporal_id = {}; + }; + + struct EncodeH264SliceHeader + { + using NativeType = StdVideoEncodeH264SliceHeader; + + operator StdVideoEncodeH264SliceHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH264SliceHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( first_mb_in_slice == rhs.first_mb_in_slice ) && ( slice_type == rhs.slice_type ) && + ( slice_alpha_c0_offset_div2 == rhs.slice_alpha_c0_offset_div2 ) && ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && + ( reserved1 == rhs.reserved1 ) && ( cabac_init_idc == rhs.cabac_init_idc ) && + ( disable_deblocking_filter_idc == rhs.disable_deblocking_filter_idc ) && ( pWeightTable == rhs.pWeightTable ); + } + + bool operator!=( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264SliceHeaderFlags flags = {}; + uint32_t first_mb_in_slice = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType::eP; + int8_t slice_alpha_c0_offset_div2 = {}; + int8_t slice_beta_offset_div2 = {}; + uint16_t reserved1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc cabac_init_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc::e0; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc disable_deblocking_filter_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc::eDisabled; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTable * pWeightTable = {}; + }; + + //=== vulkan_video_codec_h265std === + + struct H265DecPicBufMgr + { + using NativeType = StdVideoH265DecPicBufMgr; + + operator StdVideoH265DecPicBufMgr const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265DecPicBufMgr &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( max_latency_increase_plus1 == rhs.max_latency_increase_plus1 ) && ( max_dec_pic_buffering_minus1 == rhs.max_dec_pic_buffering_minus1 ) && + ( max_num_reorder_pics == rhs.max_num_reorder_pics ); + } + + bool operator!=( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_latency_increase_plus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_dec_pic_buffering_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D max_num_reorder_pics = {}; + }; + + struct H265SubLayerHrdParameters + 
{ + using NativeType = StdVideoH265SubLayerHrdParameters; + + operator StdVideoH265SubLayerHrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SubLayerHrdParameters &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && + ( cpb_size_du_value_minus1 == rhs.cpb_size_du_value_minus1 ) && ( bit_rate_du_value_minus1 == rhs.bit_rate_du_value_minus1 ) && + ( cbr_flag == rhs.cbr_flag ); + } + + bool operator!=( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper1D bit_rate_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_size_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_size_du_value_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D bit_rate_du_value_minus1 = {}; + uint32_t cbr_flag = {}; + }; + + struct H265HrdFlags + { + using NativeType = StdVideoH265HrdFlags; + + operator StdVideoH265HrdFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265HrdFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) && + ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ) && + ( sub_pic_hrd_params_present_flag == rhs.sub_pic_hrd_params_present_flag ) && + ( sub_pic_cpb_params_in_pic_timing_sei_flag == rhs.sub_pic_cpb_params_in_pic_timing_sei_flag ) && + ( fixed_pic_rate_general_flag == rhs.fixed_pic_rate_general_flag ) && ( fixed_pic_rate_within_cvs_flag == rhs.fixed_pic_rate_within_cvs_flag ) && + ( low_delay_hrd_flag == rhs.low_delay_hrd_flag ); + } + + bool operator!=( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + uint32_t sub_pic_hrd_params_present_flag : 1; + uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1; + uint32_t fixed_pic_rate_general_flag : 8; + uint32_t fixed_pic_rate_within_cvs_flag : 8; + uint32_t low_delay_hrd_flag : 8; + }; + + struct H265HrdParameters + { + using NativeType = StdVideoH265HrdParameters; + + operator StdVideoH265HrdParameters const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265HrdParameters &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( tick_divisor_minus2 == rhs.tick_divisor_minus2 ) && + ( du_cpb_removal_delay_increment_length_minus1 == rhs.du_cpb_removal_delay_increment_length_minus1 ) && + ( dpb_output_delay_du_length_minus1 == rhs.dpb_output_delay_du_length_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && + ( cpb_size_scale == rhs.cpb_size_scale ) && ( cpb_size_du_scale == rhs.cpb_size_du_scale ) && + ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) && + ( au_cpb_removal_delay_length_minus1 == rhs.au_cpb_removal_delay_length_minus1 ) && + ( dpb_output_delay_length_minus1 == 
rhs.dpb_output_delay_length_minus1 ) && ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && + ( elemental_duration_in_tc_minus1 == rhs.elemental_duration_in_tc_minus1 ) && ( reserved == rhs.reserved ) && + ( pSubLayerHrdParametersNal == rhs.pSubLayerHrdParametersNal ) && ( pSubLayerHrdParametersVcl == rhs.pSubLayerHrdParametersVcl ); + } + + bool operator!=( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdFlags flags = {}; + uint8_t tick_divisor_minus2 = {}; + uint8_t du_cpb_removal_delay_increment_length_minus1 = {}; + uint8_t dpb_output_delay_du_length_minus1 = {}; + uint8_t bit_rate_scale = {}; + uint8_t cpb_size_scale = {}; + uint8_t cpb_size_du_scale = {}; + uint8_t initial_cpb_removal_delay_length_minus1 = {}; + uint8_t au_cpb_removal_delay_length_minus1 = {}; + uint8_t dpb_output_delay_length_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cpb_cnt_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D elemental_duration_in_tc_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersNal = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersVcl = {}; + }; + + struct H265VpsFlags + { + using NativeType = StdVideoH265VpsFlags; + + operator StdVideoH265VpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( vps_temporal_id_nesting_flag == rhs.vps_temporal_id_nesting_flag ) && + ( vps_sub_layer_ordering_info_present_flag == rhs.vps_sub_layer_ordering_info_present_flag ) && + ( vps_timing_info_present_flag == rhs.vps_timing_info_present_flag ) && + ( vps_poc_proportional_to_timing_flag == rhs.vps_poc_proportional_to_timing_flag ); + } + + bool operator!=( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t vps_temporal_id_nesting_flag : 1; + uint32_t vps_sub_layer_ordering_info_present_flag : 1; + uint32_t vps_timing_info_present_flag : 1; + uint32_t vps_poc_proportional_to_timing_flag : 1; + }; + + struct H265ProfileTierLevelFlags + { + using NativeType = StdVideoH265ProfileTierLevelFlags; + + operator StdVideoH265ProfileTierLevelFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevelFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( general_tier_flag == rhs.general_tier_flag ) && ( general_progressive_source_flag == rhs.general_progressive_source_flag ) && + ( general_interlaced_source_flag == rhs.general_interlaced_source_flag ) && + ( general_non_packed_constraint_flag == rhs.general_non_packed_constraint_flag ) && + ( general_frame_only_constraint_flag == rhs.general_frame_only_constraint_flag ); + } + + bool operator!=( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t general_tier_flag : 1; + uint32_t general_progressive_source_flag : 1; + uint32_t general_interlaced_source_flag : 1; + uint32_t general_non_packed_constraint_flag : 1; + uint32_t 
general_frame_only_constraint_flag : 1; + }; + + struct H265ProfileTierLevel + { + using NativeType = StdVideoH265ProfileTierLevel; + + operator StdVideoH265ProfileTierLevel const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ProfileTierLevel &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( general_profile_idc == rhs.general_profile_idc ) && ( general_level_idc == rhs.general_level_idc ); + } + + bool operator!=( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevelFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc general_profile_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc::eMain; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc general_level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc::e1_0; + }; + + struct H265VideoParameterSet + { + using NativeType = StdVideoH265VideoParameterSet; + + operator StdVideoH265VideoParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265VideoParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( vps_video_parameter_set_id == rhs.vps_video_parameter_set_id ) && + ( vps_max_sub_layers_minus1 == rhs.vps_max_sub_layers_minus1 ) && ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && + ( vps_num_units_in_tick == rhs.vps_num_units_in_tick ) && ( vps_time_scale == rhs.vps_time_scale ) && + ( vps_num_ticks_poc_diff_one_minus1 == rhs.vps_num_ticks_poc_diff_one_minus1 ) && ( reserved3 == rhs.reserved3 ) && + ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pHrdParameters == rhs.pHrdParameters ) && ( pProfileTierLevel == rhs.pProfileTierLevel ); + } + + bool operator!=( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265VpsFlags flags = {}; + uint8_t vps_video_parameter_set_id = {}; + uint8_t vps_max_sub_layers_minus1 = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint32_t vps_num_units_in_tick = {}; + uint32_t vps_time_scale = {}; + uint32_t vps_num_ticks_poc_diff_one_minus1 = {}; + uint32_t reserved3 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {}; + }; + + struct H265ScalingLists + { + using NativeType = StdVideoH265ScalingLists; + + operator StdVideoH265ScalingLists const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ScalingLists &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ) && ( ScalingList16x16 == rhs.ScalingList16x16 ) && + ( ScalingList32x32 == rhs.ScalingList32x32 ) && ( ScalingListDCCoef16x16 
== rhs.ScalingListDCCoef16x16 ) && + ( ScalingListDCCoef32x32 == rhs.ScalingListDCCoef32x32 ); + } + + bool operator!=( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList4x4 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList8x8 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList16x16 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D + ScalingList32x32 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ScalingListDCCoef16x16 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ScalingListDCCoef32x32 = {}; + }; + + struct H265SpsVuiFlags + { + using NativeType = StdVideoH265SpsVuiFlags; + + operator StdVideoH265SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsVuiFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) && + ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) && + ( video_full_range_flag == rhs.video_full_range_flag ) && ( colour_description_present_flag == rhs.colour_description_present_flag ) && + ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && + ( neutral_chroma_indication_flag == rhs.neutral_chroma_indication_flag ) && ( field_seq_flag == rhs.field_seq_flag ) && + ( frame_field_info_present_flag == rhs.frame_field_info_present_flag ) && ( default_display_window_flag == rhs.default_display_window_flag ) && + ( vui_timing_info_present_flag == rhs.vui_timing_info_present_flag ) && + ( vui_poc_proportional_to_timing_flag == rhs.vui_poc_proportional_to_timing_flag ) && + ( vui_hrd_parameters_present_flag == rhs.vui_hrd_parameters_present_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) && + ( tiles_fixed_structure_flag == rhs.tiles_fixed_structure_flag ) && + ( motion_vectors_over_pic_boundaries_flag == rhs.motion_vectors_over_pic_boundaries_flag ) && + ( restricted_ref_pic_lists_flag == rhs.restricted_ref_pic_lists_flag ); + } + + bool operator!=( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t colour_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t neutral_chroma_indication_flag : 1; + uint32_t field_seq_flag : 1; + uint32_t frame_field_info_present_flag : 1; + uint32_t default_display_window_flag : 1; + uint32_t vui_timing_info_present_flag : 1; + uint32_t vui_poc_proportional_to_timing_flag : 1; + uint32_t vui_hrd_parameters_present_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t tiles_fixed_structure_flag : 1; + uint32_t motion_vectors_over_pic_boundaries_flag : 1; + uint32_t restricted_ref_pic_lists_flag : 1; + }; + + struct H265SequenceParameterSetVui + { + using NativeType = StdVideoH265SequenceParameterSetVui; + + operator StdVideoH265SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSetVui &() 
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) && + ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) && + ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coeffs == rhs.matrix_coeffs ) && + ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) && + ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && ( def_disp_win_left_offset == rhs.def_disp_win_left_offset ) && + ( def_disp_win_right_offset == rhs.def_disp_win_right_offset ) && ( def_disp_win_top_offset == rhs.def_disp_win_top_offset ) && + ( def_disp_win_bottom_offset == rhs.def_disp_win_bottom_offset ) && ( vui_num_units_in_tick == rhs.vui_num_units_in_tick ) && + ( vui_time_scale == rhs.vui_time_scale ) && ( vui_num_ticks_poc_diff_one_minus1 == rhs.vui_num_ticks_poc_diff_one_minus1 ) && + ( min_spatial_segmentation_idc == rhs.min_spatial_segmentation_idc ) && ( reserved3 == rhs.reserved3 ) && + ( max_bytes_per_pic_denom == rhs.max_bytes_per_pic_denom ) && ( max_bits_per_min_cu_denom == rhs.max_bits_per_min_cu_denom ) && + ( log2_max_mv_length_horizontal == rhs.log2_max_mv_length_horizontal ) && ( log2_max_mv_length_vertical == rhs.log2_max_mv_length_vertical ) && + ( pHrdParameters == rhs.pHrdParameters ); + } + + bool operator!=( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsVuiFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc aspect_ratio_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc::eUnspecified; + uint16_t sar_width = {}; + uint16_t sar_height = {}; + uint8_t video_format = {}; + uint8_t colour_primaries = {}; + uint8_t transfer_characteristics = {}; + uint8_t matrix_coeffs = {}; + uint8_t chroma_sample_loc_type_top_field = {}; + uint8_t chroma_sample_loc_type_bottom_field = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint16_t def_disp_win_left_offset = {}; + uint16_t def_disp_win_right_offset = {}; + uint16_t def_disp_win_top_offset = {}; + uint16_t def_disp_win_bottom_offset = {}; + uint32_t vui_num_units_in_tick = {}; + uint32_t vui_time_scale = {}; + uint32_t vui_num_ticks_poc_diff_one_minus1 = {}; + uint16_t min_spatial_segmentation_idc = {}; + uint16_t reserved3 = {}; + uint8_t max_bytes_per_pic_denom = {}; + uint8_t max_bits_per_min_cu_denom = {}; + uint8_t log2_max_mv_length_horizontal = {}; + uint8_t log2_max_mv_length_vertical = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters = {}; + }; + + struct H265PredictorPaletteEntries + { + using NativeType = StdVideoH265PredictorPaletteEntries; + + operator StdVideoH265PredictorPaletteEntries const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PredictorPaletteEntries &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( PredictorPaletteEntries == rhs.PredictorPaletteEntries ); + } + + bool operator!=( 
H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE:: + ArrayWrapper2D + PredictorPaletteEntries = {}; + }; + + struct H265SpsFlags + { + using NativeType = StdVideoH265SpsFlags; + + operator StdVideoH265SpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sps_temporal_id_nesting_flag == rhs.sps_temporal_id_nesting_flag ) && ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) && + ( conformance_window_flag == rhs.conformance_window_flag ) && + ( sps_sub_layer_ordering_info_present_flag == rhs.sps_sub_layer_ordering_info_present_flag ) && + ( scaling_list_enabled_flag == rhs.scaling_list_enabled_flag ) && + ( sps_scaling_list_data_present_flag == rhs.sps_scaling_list_data_present_flag ) && ( amp_enabled_flag == rhs.amp_enabled_flag ) && + ( sample_adaptive_offset_enabled_flag == rhs.sample_adaptive_offset_enabled_flag ) && ( pcm_enabled_flag == rhs.pcm_enabled_flag ) && + ( pcm_loop_filter_disabled_flag == rhs.pcm_loop_filter_disabled_flag ) && + ( long_term_ref_pics_present_flag == rhs.long_term_ref_pics_present_flag ) && + ( sps_temporal_mvp_enabled_flag == rhs.sps_temporal_mvp_enabled_flag ) && + ( strong_intra_smoothing_enabled_flag == rhs.strong_intra_smoothing_enabled_flag ) && + ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ) && ( sps_extension_present_flag == rhs.sps_extension_present_flag ) && + ( sps_range_extension_flag == rhs.sps_range_extension_flag ) && + ( transform_skip_rotation_enabled_flag == rhs.transform_skip_rotation_enabled_flag ) && + ( transform_skip_context_enabled_flag == rhs.transform_skip_context_enabled_flag ) && + ( implicit_rdpcm_enabled_flag == rhs.implicit_rdpcm_enabled_flag ) && ( explicit_rdpcm_enabled_flag == rhs.explicit_rdpcm_enabled_flag ) && + ( extended_precision_processing_flag == rhs.extended_precision_processing_flag ) && + ( intra_smoothing_disabled_flag == rhs.intra_smoothing_disabled_flag ) && + ( high_precision_offsets_enabled_flag == rhs.high_precision_offsets_enabled_flag ) && + ( persistent_rice_adaptation_enabled_flag == rhs.persistent_rice_adaptation_enabled_flag ) && + ( cabac_bypass_alignment_enabled_flag == rhs.cabac_bypass_alignment_enabled_flag ) && ( sps_scc_extension_flag == rhs.sps_scc_extension_flag ) && + ( sps_curr_pic_ref_enabled_flag == rhs.sps_curr_pic_ref_enabled_flag ) && ( palette_mode_enabled_flag == rhs.palette_mode_enabled_flag ) && + ( sps_palette_predictor_initializers_present_flag == rhs.sps_palette_predictor_initializers_present_flag ) && + ( intra_boundary_filtering_disabled_flag == rhs.intra_boundary_filtering_disabled_flag ); + } + + bool operator!=( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t sps_temporal_id_nesting_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t conformance_window_flag : 1; + uint32_t sps_sub_layer_ordering_info_present_flag : 1; + uint32_t scaling_list_enabled_flag : 1; + uint32_t sps_scaling_list_data_present_flag : 1; + uint32_t amp_enabled_flag : 1; + uint32_t sample_adaptive_offset_enabled_flag : 1; + uint32_t pcm_enabled_flag : 1; + uint32_t pcm_loop_filter_disabled_flag : 1; + uint32_t long_term_ref_pics_present_flag : 1; + uint32_t sps_temporal_mvp_enabled_flag 
: 1; + uint32_t strong_intra_smoothing_enabled_flag : 1; + uint32_t vui_parameters_present_flag : 1; + uint32_t sps_extension_present_flag : 1; + uint32_t sps_range_extension_flag : 1; + uint32_t transform_skip_rotation_enabled_flag : 1; + uint32_t transform_skip_context_enabled_flag : 1; + uint32_t implicit_rdpcm_enabled_flag : 1; + uint32_t explicit_rdpcm_enabled_flag : 1; + uint32_t extended_precision_processing_flag : 1; + uint32_t intra_smoothing_disabled_flag : 1; + uint32_t high_precision_offsets_enabled_flag : 1; + uint32_t persistent_rice_adaptation_enabled_flag : 1; + uint32_t cabac_bypass_alignment_enabled_flag : 1; + uint32_t sps_scc_extension_flag : 1; + uint32_t sps_curr_pic_ref_enabled_flag : 1; + uint32_t palette_mode_enabled_flag : 1; + uint32_t sps_palette_predictor_initializers_present_flag : 1; + uint32_t intra_boundary_filtering_disabled_flag : 1; + }; + + struct H265ShortTermRefPicSetFlags + { + using NativeType = StdVideoH265ShortTermRefPicSetFlags; + + operator StdVideoH265ShortTermRefPicSetFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSetFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( inter_ref_pic_set_prediction_flag == rhs.inter_ref_pic_set_prediction_flag ) && ( delta_rps_sign == rhs.delta_rps_sign ); + } + + bool operator!=( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t inter_ref_pic_set_prediction_flag : 1; + uint32_t delta_rps_sign : 1; + }; + + struct H265ShortTermRefPicSet + { + using NativeType = StdVideoH265ShortTermRefPicSet; + + operator StdVideoH265ShortTermRefPicSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265ShortTermRefPicSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( delta_idx_minus1 == rhs.delta_idx_minus1 ) && ( use_delta_flag == rhs.use_delta_flag ) && + ( abs_delta_rps_minus1 == rhs.abs_delta_rps_minus1 ) && ( used_by_curr_pic_flag == rhs.used_by_curr_pic_flag ) && + ( used_by_curr_pic_s0_flag == rhs.used_by_curr_pic_s0_flag ) && ( used_by_curr_pic_s1_flag == rhs.used_by_curr_pic_s1_flag ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( reserved3 == rhs.reserved3 ) && + ( num_negative_pics == rhs.num_negative_pics ) && ( num_positive_pics == rhs.num_positive_pics ) && + ( delta_poc_s0_minus1 == rhs.delta_poc_s0_minus1 ) && ( delta_poc_s1_minus1 == rhs.delta_poc_s1_minus1 ); + } + + bool operator!=( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSetFlags flags = {}; + uint32_t delta_idx_minus1 = {}; + uint16_t use_delta_flag = {}; + uint16_t abs_delta_rps_minus1 = {}; + uint16_t used_by_curr_pic_flag = {}; + uint16_t used_by_curr_pic_s0_flag = {}; + uint16_t used_by_curr_pic_s1_flag = {}; + uint16_t reserved1 = {}; + uint8_t reserved2 = {}; + uint8_t reserved3 = {}; + uint8_t num_negative_pics = {}; + uint8_t num_positive_pics = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_s0_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_s1_minus1 = {}; + }; + + struct 
H265LongTermRefPicsSps + { + using NativeType = StdVideoH265LongTermRefPicsSps; + + operator StdVideoH265LongTermRefPicsSps const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265LongTermRefPicsSps &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_by_curr_pic_lt_sps_flag == rhs.used_by_curr_pic_lt_sps_flag ) && ( lt_ref_pic_poc_lsb_sps == rhs.lt_ref_pic_poc_lsb_sps ); + } + + bool operator!=( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_by_curr_pic_lt_sps_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lt_ref_pic_poc_lsb_sps = {}; + }; + + struct H265SequenceParameterSet + { + using NativeType = StdVideoH265SequenceParameterSet; + + operator StdVideoH265SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265SequenceParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( chroma_format_idc == rhs.chroma_format_idc ) && ( pic_width_in_luma_samples == rhs.pic_width_in_luma_samples ) && + ( pic_height_in_luma_samples == rhs.pic_height_in_luma_samples ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( sps_max_sub_layers_minus1 == rhs.sps_max_sub_layers_minus1 ) && ( sps_seq_parameter_set_id == rhs.sps_seq_parameter_set_id ) && + ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) && + ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) && + ( log2_min_luma_coding_block_size_minus3 == rhs.log2_min_luma_coding_block_size_minus3 ) && + ( log2_diff_max_min_luma_coding_block_size == rhs.log2_diff_max_min_luma_coding_block_size ) && + ( log2_min_luma_transform_block_size_minus2 == rhs.log2_min_luma_transform_block_size_minus2 ) && + ( log2_diff_max_min_luma_transform_block_size == rhs.log2_diff_max_min_luma_transform_block_size ) && + ( max_transform_hierarchy_depth_inter == rhs.max_transform_hierarchy_depth_inter ) && + ( max_transform_hierarchy_depth_intra == rhs.max_transform_hierarchy_depth_intra ) && + ( num_short_term_ref_pic_sets == rhs.num_short_term_ref_pic_sets ) && ( num_long_term_ref_pics_sps == rhs.num_long_term_ref_pics_sps ) && + ( pcm_sample_bit_depth_luma_minus1 == rhs.pcm_sample_bit_depth_luma_minus1 ) && + ( pcm_sample_bit_depth_chroma_minus1 == rhs.pcm_sample_bit_depth_chroma_minus1 ) && + ( log2_min_pcm_luma_coding_block_size_minus3 == rhs.log2_min_pcm_luma_coding_block_size_minus3 ) && + ( log2_diff_max_min_pcm_luma_coding_block_size == rhs.log2_diff_max_min_pcm_luma_coding_block_size ) && ( reserved1 == rhs.reserved1 ) && + ( reserved2 == rhs.reserved2 ) && ( palette_max_size == rhs.palette_max_size ) && + ( delta_palette_max_predictor_size == rhs.delta_palette_max_predictor_size ) && + ( motion_vector_resolution_control_idc == rhs.motion_vector_resolution_control_idc ) && + ( sps_num_palette_predictor_initializers_minus1 == rhs.sps_num_palette_predictor_initializers_minus1 ) && + ( conf_win_left_offset == rhs.conf_win_left_offset ) && ( conf_win_right_offset == rhs.conf_win_right_offset ) && + ( conf_win_top_offset == rhs.conf_win_top_offset ) && ( conf_win_bottom_offset == 
rhs.conf_win_bottom_offset ) && + ( pProfileTierLevel == rhs.pProfileTierLevel ) && ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pScalingLists == rhs.pScalingLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPicsSps == rhs.pLongTermRefPicsSps ) && + ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ) && ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); + } + + bool operator!=( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc chroma_format_idc = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc::eMonochrome; + uint32_t pic_width_in_luma_samples = {}; + uint32_t pic_height_in_luma_samples = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t sps_max_sub_layers_minus1 = {}; + uint8_t sps_seq_parameter_set_id = {}; + uint8_t bit_depth_luma_minus8 = {}; + uint8_t bit_depth_chroma_minus8 = {}; + uint8_t log2_max_pic_order_cnt_lsb_minus4 = {}; + uint8_t log2_min_luma_coding_block_size_minus3 = {}; + uint8_t log2_diff_max_min_luma_coding_block_size = {}; + uint8_t log2_min_luma_transform_block_size_minus2 = {}; + uint8_t log2_diff_max_min_luma_transform_block_size = {}; + uint8_t max_transform_hierarchy_depth_inter = {}; + uint8_t max_transform_hierarchy_depth_intra = {}; + uint8_t num_short_term_ref_pic_sets = {}; + uint8_t num_long_term_ref_pics_sps = {}; + uint8_t pcm_sample_bit_depth_luma_minus1 = {}; + uint8_t pcm_sample_bit_depth_chroma_minus1 = {}; + uint8_t log2_min_pcm_luma_coding_block_size_minus3 = {}; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + uint8_t palette_max_size = {}; + uint8_t delta_palette_max_predictor_size = {}; + uint8_t motion_vector_resolution_control_idc = {}; + uint8_t sps_num_palette_predictor_initializers_minus1 = {}; + uint32_t conf_win_left_offset = {}; + uint32_t conf_win_right_offset = {}; + uint32_t conf_win_top_offset = {}; + uint32_t conf_win_bottom_offset = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr * pDecPicBufMgr = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LongTermRefPicsSps * pLongTermRefPicsSps = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SequenceParameterSetVui * pSequenceParameterSetVui = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries = {}; + }; + + struct H265PpsFlags + { + using NativeType = StdVideoH265PpsFlags; + + operator StdVideoH265PpsFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PpsFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( dependent_slice_segments_enabled_flag == rhs.dependent_slice_segments_enabled_flag ) && + ( output_flag_present_flag == rhs.output_flag_present_flag ) && ( sign_data_hiding_enabled_flag == rhs.sign_data_hiding_enabled_flag ) && + ( 
cabac_init_present_flag == rhs.cabac_init_present_flag ) && ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) && + ( transform_skip_enabled_flag == rhs.transform_skip_enabled_flag ) && ( cu_qp_delta_enabled_flag == rhs.cu_qp_delta_enabled_flag ) && + ( pps_slice_chroma_qp_offsets_present_flag == rhs.pps_slice_chroma_qp_offsets_present_flag ) && + ( weighted_pred_flag == rhs.weighted_pred_flag ) && ( weighted_bipred_flag == rhs.weighted_bipred_flag ) && + ( transquant_bypass_enabled_flag == rhs.transquant_bypass_enabled_flag ) && ( tiles_enabled_flag == rhs.tiles_enabled_flag ) && + ( entropy_coding_sync_enabled_flag == rhs.entropy_coding_sync_enabled_flag ) && ( uniform_spacing_flag == rhs.uniform_spacing_flag ) && + ( loop_filter_across_tiles_enabled_flag == rhs.loop_filter_across_tiles_enabled_flag ) && + ( pps_loop_filter_across_slices_enabled_flag == rhs.pps_loop_filter_across_slices_enabled_flag ) && + ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && + ( deblocking_filter_override_enabled_flag == rhs.deblocking_filter_override_enabled_flag ) && + ( pps_deblocking_filter_disabled_flag == rhs.pps_deblocking_filter_disabled_flag ) && + ( pps_scaling_list_data_present_flag == rhs.pps_scaling_list_data_present_flag ) && + ( lists_modification_present_flag == rhs.lists_modification_present_flag ) && + ( slice_segment_header_extension_present_flag == rhs.slice_segment_header_extension_present_flag ) && + ( pps_extension_present_flag == rhs.pps_extension_present_flag ) && + ( cross_component_prediction_enabled_flag == rhs.cross_component_prediction_enabled_flag ) && + ( chroma_qp_offset_list_enabled_flag == rhs.chroma_qp_offset_list_enabled_flag ) && + ( pps_curr_pic_ref_enabled_flag == rhs.pps_curr_pic_ref_enabled_flag ) && + ( residual_adaptive_colour_transform_enabled_flag == rhs.residual_adaptive_colour_transform_enabled_flag ) && + ( pps_slice_act_qp_offsets_present_flag == rhs.pps_slice_act_qp_offsets_present_flag ) && + ( pps_palette_predictor_initializers_present_flag == rhs.pps_palette_predictor_initializers_present_flag ) && + ( monochrome_palette_flag == rhs.monochrome_palette_flag ) && ( pps_range_extension_flag == rhs.pps_range_extension_flag ); + } + + bool operator!=( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t dependent_slice_segments_enabled_flag : 1; + uint32_t output_flag_present_flag : 1; + uint32_t sign_data_hiding_enabled_flag : 1; + uint32_t cabac_init_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t transform_skip_enabled_flag : 1; + uint32_t cu_qp_delta_enabled_flag : 1; + uint32_t pps_slice_chroma_qp_offsets_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t weighted_bipred_flag : 1; + uint32_t transquant_bypass_enabled_flag : 1; + uint32_t tiles_enabled_flag : 1; + uint32_t entropy_coding_sync_enabled_flag : 1; + uint32_t uniform_spacing_flag : 1; + uint32_t loop_filter_across_tiles_enabled_flag : 1; + uint32_t pps_loop_filter_across_slices_enabled_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t deblocking_filter_override_enabled_flag : 1; + uint32_t pps_deblocking_filter_disabled_flag : 1; + uint32_t pps_scaling_list_data_present_flag : 1; + uint32_t lists_modification_present_flag : 1; + uint32_t slice_segment_header_extension_present_flag : 1; + uint32_t pps_extension_present_flag : 1; + uint32_t cross_component_prediction_enabled_flag : 1; + uint32_t 
chroma_qp_offset_list_enabled_flag : 1; + uint32_t pps_curr_pic_ref_enabled_flag : 1; + uint32_t residual_adaptive_colour_transform_enabled_flag : 1; + uint32_t pps_slice_act_qp_offsets_present_flag : 1; + uint32_t pps_palette_predictor_initializers_present_flag : 1; + uint32_t monochrome_palette_flag : 1; + uint32_t pps_range_extension_flag : 1; + }; + + struct H265PictureParameterSet + { + using NativeType = StdVideoH265PictureParameterSet; + + operator StdVideoH265PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoH265PictureParameterSet &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( num_extra_slice_header_bits == rhs.num_extra_slice_header_bits ) && + ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) && + ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( init_qp_minus26 == rhs.init_qp_minus26 ) && + ( diff_cu_qp_delta_depth == rhs.diff_cu_qp_delta_depth ) && ( pps_cb_qp_offset == rhs.pps_cb_qp_offset ) && + ( pps_cr_qp_offset == rhs.pps_cr_qp_offset ) && ( pps_beta_offset_div2 == rhs.pps_beta_offset_div2 ) && + ( pps_tc_offset_div2 == rhs.pps_tc_offset_div2 ) && ( log2_parallel_merge_level_minus2 == rhs.log2_parallel_merge_level_minus2 ) && + ( log2_max_transform_skip_block_size_minus2 == rhs.log2_max_transform_skip_block_size_minus2 ) && + ( diff_cu_chroma_qp_offset_depth == rhs.diff_cu_chroma_qp_offset_depth ) && + ( chroma_qp_offset_list_len_minus1 == rhs.chroma_qp_offset_list_len_minus1 ) && ( cb_qp_offset_list == rhs.cb_qp_offset_list ) && + ( cr_qp_offset_list == rhs.cr_qp_offset_list ) && ( log2_sao_offset_scale_luma == rhs.log2_sao_offset_scale_luma ) && + ( log2_sao_offset_scale_chroma == rhs.log2_sao_offset_scale_chroma ) && ( pps_act_y_qp_offset_plus5 == rhs.pps_act_y_qp_offset_plus5 ) && + ( pps_act_cb_qp_offset_plus5 == rhs.pps_act_cb_qp_offset_plus5 ) && ( pps_act_cr_qp_offset_plus3 == rhs.pps_act_cr_qp_offset_plus3 ) && + ( pps_num_palette_predictor_initializers == rhs.pps_num_palette_predictor_initializers ) && + ( luma_bit_depth_entry_minus8 == rhs.luma_bit_depth_entry_minus8 ) && ( chroma_bit_depth_entry_minus8 == rhs.chroma_bit_depth_entry_minus8 ) && + ( num_tile_columns_minus1 == rhs.num_tile_columns_minus1 ) && ( num_tile_rows_minus1 == rhs.num_tile_rows_minus1 ) && + ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( column_width_minus1 == rhs.column_width_minus1 ) && + ( row_height_minus1 == rhs.row_height_minus1 ) && ( reserved3 == rhs.reserved3 ) && ( pScalingLists == rhs.pScalingLists ) && + ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries ); + } + + bool operator!=( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PpsFlags flags = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t num_extra_slice_header_bits = {}; + uint8_t num_ref_idx_l0_default_active_minus1 = {}; + uint8_t num_ref_idx_l1_default_active_minus1 = {}; + int8_t init_qp_minus26 
= {}; + uint8_t diff_cu_qp_delta_depth = {}; + int8_t pps_cb_qp_offset = {}; + int8_t pps_cr_qp_offset = {}; + int8_t pps_beta_offset_div2 = {}; + int8_t pps_tc_offset_div2 = {}; + uint8_t log2_parallel_merge_level_minus2 = {}; + uint8_t log2_max_transform_skip_block_size_minus2 = {}; + uint8_t diff_cu_chroma_qp_offset_depth = {}; + uint8_t chroma_qp_offset_list_len_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cb_qp_offset_list = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D cr_qp_offset_list = {}; + uint8_t log2_sao_offset_scale_luma = {}; + uint8_t log2_sao_offset_scale_chroma = {}; + int8_t pps_act_y_qp_offset_plus5 = {}; + int8_t pps_act_cb_qp_offset_plus5 = {}; + int8_t pps_act_cr_qp_offset_plus3 = {}; + uint8_t pps_num_palette_predictor_initializers = {}; + uint8_t luma_bit_depth_entry_minus8 = {}; + uint8_t chroma_bit_depth_entry_minus8 = {}; + uint8_t num_tile_columns_minus1 = {}; + uint8_t num_tile_rows_minus1 = {}; + uint8_t reserved1 = {}; + uint8_t reserved2 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D column_width_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D row_height_minus1 = {}; + uint32_t reserved3 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists * pScalingLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries = {}; + }; + + //=== vulkan_video_codec_h265std_decode === + + struct DecodeH265PictureInfoFlags + { + using NativeType = StdVideoDecodeH265PictureInfoFlags; + + operator StdVideoDecodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( IrapPicFlag == rhs.IrapPicFlag ) && ( IdrPicFlag == rhs.IdrPicFlag ) && ( IsReference == rhs.IsReference ) && + ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ); + } + + bool operator!=( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t IrapPicFlag : 1; + uint32_t IdrPicFlag : 1; + uint32_t IsReference : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + }; + + struct DecodeH265PictureInfo + { + using NativeType = StdVideoDecodeH265PictureInfo; + + operator StdVideoDecodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( NumDeltaPocsOfRefRpsIdx == rhs.NumDeltaPocsOfRefRpsIdx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( NumBitsForSTRefPicSetInSlice == rhs.NumBitsForSTRefPicSetInSlice ) && ( reserved == rhs.reserved ) && + ( RefPicSetStCurrBefore == rhs.RefPicSetStCurrBefore ) && ( RefPicSetStCurrAfter == rhs.RefPicSetStCurrAfter ) && + ( RefPicSetLtCurr == rhs.RefPicSetLtCurr ); + } + + bool operator!=( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265PictureInfoFlags flags = {}; + uint8_t sps_video_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t NumDeltaPocsOfRefRpsIdx = {}; + int32_t PicOrderCntVal = {}; + uint16_t NumBitsForSTRefPicSetInSlice = {}; + uint16_t reserved = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetStCurrBefore = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetStCurrAfter = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicSetLtCurr = {}; + }; + + struct DecodeH265ReferenceInfoFlags + { + using NativeType = StdVideoDecodeH265ReferenceInfoFlags; + + operator StdVideoDecodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ); + } + + bool operator!=( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + }; + + struct DecodeH265ReferenceInfo + { + using NativeType = StdVideoDecodeH265ReferenceInfo; + + operator StdVideoDecodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoDecodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( PicOrderCntVal == rhs.PicOrderCntVal ); + } + + bool operator!=( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265ReferenceInfoFlags flags = {}; + int32_t PicOrderCntVal = {}; + }; + + //=== vulkan_video_codec_h265std_encode === + + struct EncodeH265WeightTableFlags + { + using NativeType = StdVideoEncodeH265WeightTableFlags; + + operator StdVideoEncodeH265WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTableFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) && + ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag ); + } + + bool operator!=( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint16_t luma_weight_l0_flag = {}; + uint16_t chroma_weight_l0_flag = {}; + uint16_t luma_weight_l1_flag = {}; + uint16_t chroma_weight_l1_flag = {}; + }; + + struct EncodeH265WeightTable + { + using NativeType = StdVideoEncodeH265WeightTable; + + operator StdVideoEncodeH265WeightTable const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265WeightTable &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { 
+ return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) && + ( delta_chroma_log2_weight_denom == rhs.delta_chroma_log2_weight_denom ) && ( delta_luma_weight_l0 == rhs.delta_luma_weight_l0 ) && + ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( delta_chroma_weight_l0 == rhs.delta_chroma_weight_l0 ) && + ( delta_chroma_offset_l0 == rhs.delta_chroma_offset_l0 ) && ( delta_luma_weight_l1 == rhs.delta_luma_weight_l1 ) && + ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( delta_chroma_weight_l1 == rhs.delta_chroma_weight_l1 ) && + ( delta_chroma_offset_l1 == rhs.delta_chroma_offset_l1 ); + } + + bool operator!=( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTableFlags flags = {}; + uint8_t luma_log2_weight_denom = {}; + int8_t delta_chroma_log2_weight_denom = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_luma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_weight_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_offset_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_luma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D luma_offset_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_weight_l1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper2D delta_chroma_offset_l1 = {}; + }; + + struct EncodeH265SliceSegmentHeaderFlags + { + using NativeType = StdVideoEncodeH265SliceSegmentHeaderFlags; + + operator StdVideoEncodeH265SliceSegmentHeaderFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeaderFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( first_slice_segment_in_pic_flag == rhs.first_slice_segment_in_pic_flag ) && + ( dependent_slice_segment_flag == rhs.dependent_slice_segment_flag ) && ( slice_sao_luma_flag == rhs.slice_sao_luma_flag ) && + ( slice_sao_chroma_flag == rhs.slice_sao_chroma_flag ) && ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && + ( mvd_l1_zero_flag == rhs.mvd_l1_zero_flag ) && ( cabac_init_flag == rhs.cabac_init_flag ) && + ( cu_chroma_qp_offset_enabled_flag == rhs.cu_chroma_qp_offset_enabled_flag ) && + ( deblocking_filter_override_flag == rhs.deblocking_filter_override_flag ) && + ( slice_deblocking_filter_disabled_flag == rhs.slice_deblocking_filter_disabled_flag ) && + ( collocated_from_l0_flag == rhs.collocated_from_l0_flag ) && + ( slice_loop_filter_across_slices_enabled_flag == rhs.slice_loop_filter_across_slices_enabled_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t first_slice_segment_in_pic_flag : 1; + uint32_t dependent_slice_segment_flag : 1; + uint32_t slice_sao_luma_flag : 1; + uint32_t slice_sao_chroma_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t mvd_l1_zero_flag : 1; + uint32_t cabac_init_flag : 1; + uint32_t cu_chroma_qp_offset_enabled_flag : 1; + uint32_t deblocking_filter_override_flag : 1; + uint32_t slice_deblocking_filter_disabled_flag : 1; + uint32_t collocated_from_l0_flag : 1; + uint32_t slice_loop_filter_across_slices_enabled_flag : 1; + uint32_t reserved : 20; + }; + + 
struct EncodeH265SliceSegmentHeader + { + using NativeType = StdVideoEncodeH265SliceSegmentHeader; + + operator StdVideoEncodeH265SliceSegmentHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( slice_type == rhs.slice_type ) && ( slice_segment_address == rhs.slice_segment_address ) && + ( collocated_ref_idx == rhs.collocated_ref_idx ) && ( MaxNumMergeCand == rhs.MaxNumMergeCand ) && + ( slice_cb_qp_offset == rhs.slice_cb_qp_offset ) && ( slice_cr_qp_offset == rhs.slice_cr_qp_offset ) && + ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && ( slice_tc_offset_div2 == rhs.slice_tc_offset_div2 ) && + ( slice_act_y_qp_offset == rhs.slice_act_y_qp_offset ) && ( slice_act_cb_qp_offset == rhs.slice_act_cb_qp_offset ) && + ( slice_act_cr_qp_offset == rhs.slice_act_cr_qp_offset ) && ( reserved1 == rhs.reserved1 ) && ( pWeightTable == rhs.pWeightTable ); + } + + bool operator!=( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265SliceSegmentHeaderFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType::eB; + uint32_t slice_segment_address = {}; + uint8_t collocated_ref_idx = {}; + uint8_t MaxNumMergeCand = {}; + int8_t slice_cb_qp_offset = {}; + int8_t slice_cr_qp_offset = {}; + int8_t slice_beta_offset_div2 = {}; + int8_t slice_tc_offset_div2 = {}; + int8_t slice_act_y_qp_offset = {}; + int8_t slice_act_cb_qp_offset = {}; + int8_t slice_act_cr_qp_offset = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTable * pWeightTable = {}; + }; + + struct EncodeH265ReferenceListsInfoFlags + { + using NativeType = StdVideoEncodeH265ReferenceListsInfoFlags; + + operator StdVideoEncodeH265ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) && + ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; + }; + + struct EncodeH265ReferenceListsInfo + { + using NativeType = StdVideoEncodeH265ReferenceListsInfo; + + operator StdVideoEncodeH265ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == 
rhs.num_ref_idx_l0_active_minus1 ) && + ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) && + ( RefPicList1 == rhs.RefPicList1 ) && ( list_entry_l0 == rhs.list_entry_l0 ) && ( list_entry_l1 == rhs.list_entry_l1 ); + } + + bool operator!=( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfoFlags flags = {}; + uint8_t num_ref_idx_l0_active_minus1 = {}; + uint8_t num_ref_idx_l1_active_minus1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D RefPicList1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D list_entry_l0 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D list_entry_l1 = {}; + }; + + struct EncodeH265PictureInfoFlags + { + using NativeType = StdVideoEncodeH265PictureInfoFlags; + + operator StdVideoEncodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( is_reference == rhs.is_reference ) && ( IrapPicFlag == rhs.IrapPicFlag ) && + ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( discardable_flag == rhs.discardable_flag ) && + ( cross_layer_bla_flag == rhs.cross_layer_bla_flag ) && ( pic_output_flag == rhs.pic_output_flag ) && + ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && + ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ) && + ( slice_temporal_mvp_enabled_flag == rhs.slice_temporal_mvp_enabled_flag ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t is_reference : 1; + uint32_t IrapPicFlag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t discardable_flag : 1; + uint32_t cross_layer_bla_flag : 1; + uint32_t pic_output_flag : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + uint32_t slice_temporal_mvp_enabled_flag : 1; + uint32_t reserved : 23; + }; + + struct EncodeH265SliceSegmentLongTermRefPics + { + using NativeType = StdVideoEncodeH265SliceSegmentLongTermRefPics; + + operator StdVideoEncodeH265SliceSegmentLongTermRefPics const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265SliceSegmentLongTermRefPics &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265SliceSegmentLongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( num_long_term_sps == rhs.num_long_term_sps ) && ( num_long_term_pics == rhs.num_long_term_pics ) && ( lt_idx_sps == rhs.lt_idx_sps ) && + ( poc_lsb_lt == rhs.poc_lsb_lt ) && ( used_by_curr_pic_lt_flag == rhs.used_by_curr_pic_lt_flag ) && + ( delta_poc_msb_present_flag == rhs.delta_poc_msb_present_flag ) && ( delta_poc_msb_cycle_lt == rhs.delta_poc_msb_cycle_lt ); + } + + bool operator!=( EncodeH265SliceSegmentLongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t num_long_term_sps = {}; + uint8_t num_long_term_pics = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D lt_idx_sps = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D 
poc_lsb_lt = {}; + uint16_t used_by_curr_pic_lt_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_msb_present_flag = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_poc_msb_cycle_lt = {}; + }; + + struct EncodeH265PictureInfo + { + using NativeType = StdVideoEncodeH265PictureInfo; + + operator StdVideoEncodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) && + ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) && + ( short_term_ref_pic_set_idx == rhs.short_term_ref_pic_set_idx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && + ( TemporalId == rhs.TemporalId ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ) && + ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPics == rhs.pLongTermRefPics ); + } + + bool operator!=( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265PictureInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP; + uint8_t sps_video_parameter_set_id = {}; + uint8_t pps_seq_parameter_set_id = {}; + uint8_t pps_pic_parameter_set_id = {}; + uint8_t short_term_ref_pic_set_idx = {}; + int32_t PicOrderCntVal = {}; + uint8_t TemporalId = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfo * pRefLists = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet * pShortTermRefPicSet = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265SliceSegmentLongTermRefPics * pLongTermRefPics = {}; + }; + + struct EncodeH265ReferenceInfoFlags + { + using NativeType = StdVideoEncodeH265ReferenceInfoFlags; + + operator StdVideoEncodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + uint32_t reserved : 30; + }; + + struct EncodeH265ReferenceInfo + { + using NativeType = StdVideoEncodeH265ReferenceInfo; + + operator StdVideoEncodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) 
&& ( PicOrderCntVal == rhs.PicOrderCntVal ) && ( TemporalId == rhs.TemporalId ); + } + + bool operator!=( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP; + int32_t PicOrderCntVal = {}; + uint8_t TemporalId = {}; + }; + + } // namespace VULKAN_HPP_VIDEO_NAMESPACE +} // namespace VULKAN_HPP_NAMESPACE +#endif diff --git a/registry/generator.py b/registry/generator.py index 56a6310..669e5d6 100644 --- a/registry/generator.py +++ b/registry/generator.py @@ -166,6 +166,7 @@ class GeneratorOptions: reparentEnums=True, sortProcedure=regSortFeatures, requireCommandAliases=False, + requireDepends=True, ): """Constructor. @@ -208,6 +209,11 @@ class GeneratorOptions: or being complete. Defaults to True. - sortProcedure - takes a list of FeatureInfo objects and sorts them in place to a preferred order in the generated output. + - requireCommandAliases - if True, treat command aliases + as required dependencies. + - requireDepends - whether to follow API dependencies when emitting + APIs. + Default is - core API versions - Khronos (ARB/KHR/OES) extensions @@ -295,6 +301,9 @@ class GeneratorOptions: """True if alias= attributes of tags are transitively required.""" + self.requireDepends = requireDepends + """True if dependencies of API tags are transitively required.""" + def emptyRegex(self, pat): """Substitute a regular expression which matches no version or extension names for None or the empty string.""" diff --git a/registry/genvk.py b/registry/genvk.py index c6cfc86..9cfabd4 100755 --- a/registry/genvk.py +++ b/registry/genvk.py @@ -836,8 +836,9 @@ def makeGenOpts(args): ] # Video extension 'Std' interfaces, each in its own header files - # These are not Vulkan extensions, or a part of the Vulkan API at all, - # but are treated in a similar fashion for generation purposes. + # These are not Vulkan extensions, or a part of the Vulkan API at all. + # They are treated in a similar fashion for generation purposes, but + # all required APIs for each interface must be explicitly required. # # Each element of the videoStd[] array is an extension name defining an # interface, and is also the basis for the generated header file name. 
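As a rough illustration of the behaviour these registry changes introduce, the following minimal sketch (not the actual registry code; ToyRegistry and the DEPENDS table are hypothetical stand-ins) shows how a requireDepends-style switch gates transitive dependency emission: with requireDepends=False only explicitly required features are generated, which is why each video std interface must explicitly require all of the APIs it needs.

#!/usr/bin/env python3
"""Minimal sketch (not the actual registry code) of how a requireDepends-style
switch gates transitive dependency emission. The DEPENDS table and ToyRegistry
class are hypothetical stand-ins for illustration only."""

# feature name -> names of features it depends on (illustrative only)
DEPENDS = {
    'vulkan_video_codec_h265std_decode': ['vulkan_video_codec_h265std'],
    'vulkan_video_codec_h265std': [],
}


class ToyRegistry:
    def __init__(self, requireDepends):
        self.requireDepends = requireDepends
        self.emitted = []

    def generateFeature(self, name, explicit=False):
        # Mirrors the gating idea: a feature is generated only if it is
        # explicitly required, or the generator is following dependencies.
        if not (explicit or self.requireDepends):
            return
        if name in self.emitted:
            return
        self.emitted.append(name)
        for dep in DEPENDS.get(name, []):
            self.generateFeature(dep, explicit=False)


if __name__ == '__main__':
    # Video std headers are generated with requireDepends=False, so only the
    # explicitly required interface is emitted.
    video = ToyRegistry(requireDepends=False)
    video.generateFeature('vulkan_video_codec_h265std_decode', explicit=True)
    print(video.emitted)  # ['vulkan_video_codec_h265std_decode']

    # The Vulkan headers keep the default requireDepends=True and still pull
    # in transitive dependencies as before.
    core = ToyRegistry(requireDepends=True)
    core.generateFeature('vulkan_video_codec_h265std_decode', explicit=True)
    print(core.emitted)  # both entries

In the toy example, flipping requireDepends changes only whether dependencies of an explicitly required feature are emitted; the explicit requirement itself is always honoured, which matches the intent of the genvk.py options for the video std headers shown above and below.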
@@ -852,7 +853,8 @@ def makeGenOpts(args): 'vulkan_video_codec_h265std_encode', ] - addExtensionRE = makeREstring(videoStd) + # Unused at present + # addExtensionRE = makeREstring(videoStd) for codec in videoStd: headername = f'{codec}.h' @@ -869,10 +871,11 @@ def makeGenOpts(args): profile = None, versions = None, emitversions = None, - defaultExtensions = defaultAPIName, - addExtensions = addExtensionRE, + defaultExtensions = None, + addExtensions = emitExtensionRE, removeExtensions = None, emitExtensions = emitExtensionRE, + requireDepends = False, prefixText = prefixStrings + vkPrefixStrings, genFuncPointers = False, protectFile = protectFile, diff --git a/registry/reg.py b/registry/reg.py index 65a3ac2..4b5a80f 100644 --- a/registry/reg.py +++ b/registry/reg.py @@ -1241,15 +1241,23 @@ class Registry: if v.get('struct'): self.typedict[v.get('struct')].removedValidity.append(copy.deepcopy(v)) - def generateFeature(self, fname, ftype, dictionary): + def generateFeature(self, fname, ftype, dictionary, explicit=False): """Generate a single type / enum group / enum / command, and all its dependencies as needed. - fname - name of feature (``/``/``) - ftype - type of feature, 'type' | 'enum' | 'command' - - dictionary - of *Info objects - self.{type|enum|cmd}dict""" + - dictionary - of *Info objects - self.{type|enum|cmd}dict + - explicit - True if this is explicitly required by the top-level + XML tag, False if it is a dependency of an explicit + requirement.""" self.gen.logMsg('diag', 'generateFeature: generating', ftype, fname) + + if not (explicit or self.genOpts.requireDepends): + self.gen.logMsg('diag', 'generateFeature: NOT generating', ftype, fname, 'because generator does not require dependencies') + return + f = self.lookupElementInfo(fname, dictionary) if f is None: # No such feature. This is an error, but reported earlier @@ -1426,16 +1434,16 @@ class Registry: # Loop over all features inside all tags. for features in interface.findall('require'): for t in features.findall('type'): - self.generateFeature(t.get('name'), 'type', self.typedict) + self.generateFeature(t.get('name'), 'type', self.typedict, explicit=True) for e in features.findall('enum'): # If this is an enum extending an enumerated type, do not # generate it - this has already been done in reg.parseTree, # by copying this element into the enumerated type. enumextends = e.get('extends') if not enumextends: - self.generateFeature(e.get('name'), 'enum', self.enumdict) + self.generateFeature(e.get('name'), 'enum', self.enumdict, explicit=True) for c in features.findall('command'): - self.generateFeature(c.get('name'), 'command', self.cmddict) + self.generateFeature(c.get('name'), 'command', self.cmddict, explicit=True) def generateSpirv(self, spirv, dictionary): if spirv is None: diff --git a/registry/spec_tools/util.py b/registry/spec_tools/util.py index a607dd2..bf25845 100644 --- a/registry/spec_tools/util.py +++ b/registry/spec_tools/util.py @@ -1,5 +1,5 @@ """Utility functions not closely tied to other spec_tools types.""" -# Copyright 2018-2019 Collabora, Ltd. +# Copyright (c) 2018-2019 Collabora, Ltd. # Copyright 2013-2023 The Khronos Group Inc. 
# # SPDX-License-Identifier: Apache-2.0 diff --git a/registry/validusage.json b/registry/validusage.json index 9e9106c..f549b77 100644 --- a/registry/validusage.json +++ b/registry/validusage.json @@ -1,9 +1,9 @@ { "version info": { "schema version": 2, - "api version": "1.3.257", - "comment": "from git branch: github-main commit: 2b7d9c9f65cde43580573a677490c3c7d099c09f", - "date": "2023-07-07 12:31:11Z" + "api version": "1.3.258", + "comment": "from git branch: github-main commit: 2c1e07a72b43f53cc7866c55ae32afe97d933621", + "date": "2023-07-21 09:57:58Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -278,7 +278,7 @@ }, { "vuid": "VUID-VkPhysicalDeviceProperties2-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceAccelerationStructurePropertiesKHR, VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesKHR, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceCopyMemoryIndirectPropertiesNV, VkPhysicalDeviceCustomBorderColorPropertiesEXT, VkPhysicalDeviceDepthStencilResolveProperties, VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, VkPhysicalDeviceDescriptorBufferPropertiesEXT, VkPhysicalDeviceDescriptorIndexingProperties, VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDisplacementMicromapPropertiesNV, VkPhysicalDeviceDriverProperties, VkPhysicalDeviceDrmPropertiesEXT, VkPhysicalDeviceExtendedDynamicState3PropertiesEXT, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsProperties, VkPhysicalDeviceFragmentDensityMap2PropertiesEXT, VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR, VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV, VkPhysicalDeviceFragmentShadingRatePropertiesKHR, VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceImageProcessingPropertiesQCOM, VkPhysicalDeviceInlineUniformBlockProperties, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMaintenance4Properties, VkPhysicalDeviceMemoryDecompressionPropertiesNV, VkPhysicalDeviceMeshShaderPropertiesEXT, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiDrawPropertiesEXT, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDeviceOpacityMicromapPropertiesEXT, VkPhysicalDeviceOpticalFlowPropertiesNV, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePerformanceQueryPropertiesKHR, VkPhysicalDevicePipelineRobustnessPropertiesEXT, VkPhysicalDevicePointClippingProperties, VkPhysicalDevicePortabilitySubsetPropertiesKHR, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDeviceProvokingVertexPropertiesEXT, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV, VkPhysicalDeviceRayTracingPipelinePropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceRobustness2PropertiesEXT, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxProperties, VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, 
VkPhysicalDeviceShaderCorePropertiesARM, VkPhysicalDeviceShaderIntegerDotProductProperties, VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT, VkPhysicalDeviceShaderObjectPropertiesEXT, VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShaderTileImagePropertiesEXT, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlProperties, VkPhysicalDeviceSubpassShadingPropertiesHUAWEI, VkPhysicalDeviceTexelBufferAlignmentProperties, VkPhysicalDeviceTimelineSemaphoreProperties, VkPhysicalDeviceTransformFeedbackPropertiesEXT, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, VkPhysicalDeviceVulkan11Properties, VkPhysicalDeviceVulkan12Properties, or VkPhysicalDeviceVulkan13Properties" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceAccelerationStructurePropertiesKHR, VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesKHR, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceCopyMemoryIndirectPropertiesNV, VkPhysicalDeviceCustomBorderColorPropertiesEXT, VkPhysicalDeviceDepthStencilResolveProperties, VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, VkPhysicalDeviceDescriptorBufferPropertiesEXT, VkPhysicalDeviceDescriptorIndexingProperties, VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDisplacementMicromapPropertiesNV, VkPhysicalDeviceDriverProperties, VkPhysicalDeviceDrmPropertiesEXT, VkPhysicalDeviceExtendedDynamicState3PropertiesEXT, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsProperties, VkPhysicalDeviceFragmentDensityMap2PropertiesEXT, VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR, VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV, VkPhysicalDeviceFragmentShadingRatePropertiesKHR, VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, VkPhysicalDeviceHostImageCopyPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceImageProcessingPropertiesQCOM, VkPhysicalDeviceInlineUniformBlockProperties, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMaintenance4Properties, VkPhysicalDeviceMemoryDecompressionPropertiesNV, VkPhysicalDeviceMeshShaderPropertiesEXT, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiDrawPropertiesEXT, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDeviceOpacityMicromapPropertiesEXT, VkPhysicalDeviceOpticalFlowPropertiesNV, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePerformanceQueryPropertiesKHR, VkPhysicalDevicePipelineRobustnessPropertiesEXT, VkPhysicalDevicePointClippingProperties, VkPhysicalDevicePortabilitySubsetPropertiesKHR, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDeviceProvokingVertexPropertiesEXT, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV, VkPhysicalDeviceRayTracingPipelinePropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceRobustness2PropertiesEXT, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxProperties, 
VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShaderCorePropertiesARM, VkPhysicalDeviceShaderIntegerDotProductProperties, VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT, VkPhysicalDeviceShaderObjectPropertiesEXT, VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShaderTileImagePropertiesEXT, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlProperties, VkPhysicalDeviceSubpassShadingPropertiesHUAWEI, VkPhysicalDeviceTexelBufferAlignmentProperties, VkPhysicalDeviceTimelineSemaphoreProperties, VkPhysicalDeviceTransformFeedbackPropertiesEXT, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, VkPhysicalDeviceVulkan11Properties, VkPhysicalDeviceVulkan12Properties, or VkPhysicalDeviceVulkan13Properties" }, { "vuid": "VUID-VkPhysicalDeviceProperties2-sType-unique", @@ -678,7 +678,7 @@ }, { "vuid": "VUID-VkDeviceCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceDeviceMemoryReportCreateInfoEXT, VkDeviceDiagnosticsConfigCreateInfoNV, VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkDevicePrivateDataCreateInfo, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice4444FormatsFeaturesEXT, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceAccelerationStructureFeaturesKHR, VkPhysicalDeviceAddressBindingReportFeaturesEXT, VkPhysicalDeviceAmigoProfilingFeaturesSEC, VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBorderColorSwizzleFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceColorWriteEnableFeaturesEXT, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesKHR, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCopyMemoryIndirectFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceCustomBorderColorFeaturesEXT, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthBiasControlFeaturesEXT, VkPhysicalDeviceDepthClampZeroOneFeaturesEXT, VkPhysicalDeviceDepthClipControlFeaturesEXT, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorBufferFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, VkPhysicalDeviceDeviceMemoryReportFeaturesEXT, VkPhysicalDeviceDiagnosticsConfigFeaturesNV, VkPhysicalDeviceDisplacementMicromapFeaturesNV, VkPhysicalDeviceDynamicRenderingFeatures, VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceExtendedDynamicState2FeaturesEXT, VkPhysicalDeviceExtendedDynamicState3FeaturesEXT, VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, VkPhysicalDeviceExternalMemoryRDMAFeaturesNV, VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, VkPhysicalDeviceFaultFeaturesEXT, VkPhysicalDeviceFeatures2, 
VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM, VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV, VkPhysicalDeviceFragmentShadingRateFeaturesKHR, VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR, VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImage2DViewOf3DFeaturesEXT, VkPhysicalDeviceImageCompressionControlFeaturesEXT, VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, VkPhysicalDeviceImageProcessingFeaturesQCOM, VkPhysicalDeviceImageRobustnessFeatures, VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT, VkPhysicalDeviceImageViewMinLodFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInheritedViewportScissorFeaturesNV, VkPhysicalDeviceInlineUniformBlockFeatures, VkPhysicalDeviceInvocationMaskFeaturesHUAWEI, VkPhysicalDeviceLegacyDitheringFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceLinearColorAttachmentFeaturesNV, VkPhysicalDeviceMaintenance4Features, VkPhysicalDeviceMemoryDecompressionFeaturesNV, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiDrawFeaturesEXT, VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM, VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT, VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT, VkPhysicalDeviceOpacityMicromapFeaturesEXT, VkPhysicalDeviceOpticalFlowFeaturesNV, VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineCreationCacheControlFeatures, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT, VkPhysicalDevicePipelinePropertiesFeaturesEXT, VkPhysicalDevicePipelineProtectedAccessFeaturesEXT, VkPhysicalDevicePipelineRobustnessFeaturesEXT, VkPhysicalDevicePortabilitySubsetFeaturesKHR, VkPhysicalDevicePresentBarrierFeaturesNV, VkPhysicalDevicePresentIdFeaturesKHR, VkPhysicalDevicePresentWaitFeaturesKHR, VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, VkPhysicalDevicePrivateDataFeatures, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceProvokingVertexFeaturesEXT, VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT, VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, VkPhysicalDeviceRayQueryFeaturesKHR, VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV, VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR, VkPhysicalDeviceRayTracingMotionBlurFeaturesNV, VkPhysicalDeviceRayTracingPipelineFeaturesKHR, VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceRobustness2FeaturesEXT, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT, VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM, 
VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerDotProductFeatures, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT, VkPhysicalDeviceShaderObjectFeaturesEXT, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, VkPhysicalDeviceShaderTerminateInvocationFeatures, VkPhysicalDeviceShaderTileImageFeaturesEXT, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeatures, VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT, VkPhysicalDeviceSubpassShadingFeaturesHUAWEI, VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT, VkPhysicalDeviceSynchronization2Features, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeatures, VkPhysicalDeviceTilePropertiesFeaturesQCOM, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkan13Features, VkPhysicalDeviceVulkanMemoryModelFeatures, VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, VkPhysicalDeviceYcbcrImageArraysFeaturesEXT, or VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceDeviceMemoryReportCreateInfoEXT, VkDeviceDiagnosticsConfigCreateInfoNV, VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkDevicePrivateDataCreateInfo, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice4444FormatsFeaturesEXT, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceAccelerationStructureFeaturesKHR, VkPhysicalDeviceAddressBindingReportFeaturesEXT, VkPhysicalDeviceAmigoProfilingFeaturesSEC, VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBorderColorSwizzleFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceColorWriteEnableFeaturesEXT, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesKHR, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCopyMemoryIndirectFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceCustomBorderColorFeaturesEXT, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthBiasControlFeaturesEXT, VkPhysicalDeviceDepthClampZeroOneFeaturesEXT, VkPhysicalDeviceDepthClipControlFeaturesEXT, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorBufferFeaturesEXT, 
VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV, VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, VkPhysicalDeviceDeviceMemoryReportFeaturesEXT, VkPhysicalDeviceDiagnosticsConfigFeaturesNV, VkPhysicalDeviceDisplacementMicromapFeaturesNV, VkPhysicalDeviceDynamicRenderingFeatures, VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceExtendedDynamicState2FeaturesEXT, VkPhysicalDeviceExtendedDynamicState3FeaturesEXT, VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, VkPhysicalDeviceExternalMemoryRDMAFeaturesNV, VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, VkPhysicalDeviceFaultFeaturesEXT, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM, VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV, VkPhysicalDeviceFragmentShadingRateFeaturesKHR, VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR, VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, VkPhysicalDeviceHostImageCopyFeaturesEXT, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImage2DViewOf3DFeaturesEXT, VkPhysicalDeviceImageCompressionControlFeaturesEXT, VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, VkPhysicalDeviceImageProcessingFeaturesQCOM, VkPhysicalDeviceImageRobustnessFeatures, VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT, VkPhysicalDeviceImageViewMinLodFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInheritedViewportScissorFeaturesNV, VkPhysicalDeviceInlineUniformBlockFeatures, VkPhysicalDeviceInvocationMaskFeaturesHUAWEI, VkPhysicalDeviceLegacyDitheringFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceLinearColorAttachmentFeaturesNV, VkPhysicalDeviceMaintenance4Features, VkPhysicalDeviceMemoryDecompressionFeaturesNV, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiDrawFeaturesEXT, VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM, VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT, VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT, VkPhysicalDeviceOpacityMicromapFeaturesEXT, VkPhysicalDeviceOpticalFlowFeaturesNV, VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineCreationCacheControlFeatures, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT, VkPhysicalDevicePipelinePropertiesFeaturesEXT, VkPhysicalDevicePipelineProtectedAccessFeaturesEXT, VkPhysicalDevicePipelineRobustnessFeaturesEXT, VkPhysicalDevicePortabilitySubsetFeaturesKHR, VkPhysicalDevicePresentBarrierFeaturesNV, VkPhysicalDevicePresentIdFeaturesKHR, VkPhysicalDevicePresentWaitFeaturesKHR, VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, VkPhysicalDevicePrivateDataFeatures, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceProvokingVertexFeaturesEXT, VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT, 
VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, VkPhysicalDeviceRayQueryFeaturesKHR, VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV, VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR, VkPhysicalDeviceRayTracingMotionBlurFeaturesNV, VkPhysicalDeviceRayTracingPipelineFeaturesKHR, VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceRobustness2FeaturesEXT, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT, VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM, VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerDotProductFeatures, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT, VkPhysicalDeviceShaderObjectFeaturesEXT, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, VkPhysicalDeviceShaderTerminateInvocationFeatures, VkPhysicalDeviceShaderTileImageFeaturesEXT, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeatures, VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT, VkPhysicalDeviceSubpassShadingFeaturesHUAWEI, VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT, VkPhysicalDeviceSynchronization2Features, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeatures, VkPhysicalDeviceTilePropertiesFeaturesQCOM, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkan13Features, VkPhysicalDeviceVulkanMemoryModelFeatures, VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, VkPhysicalDeviceYcbcrImageArraysFeaturesEXT, or VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures" }, { "vuid": "VUID-VkDeviceCreateInfo-sType-unique", @@ -5388,17 +5388,33 @@ "vuid": "VUID-VkBufferMemoryBarrier2-buffer-01931", "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, + { + "vuid": "VUID-VkBufferMemoryBarrier2-buffer-09095", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex must be VK_QUEUE_FAMILY_EXTERNAL, VK_QUEUE_FAMILY_FOREIGN_EXT, or a valid queue family" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2-buffer-09096", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, dstQueueFamilyIndex must be VK_QUEUE_FAMILY_EXTERNAL, VK_QUEUE_FAMILY_FOREIGN_EXT, or a valid queue family" + }, { "vuid": 
"VUID-VkBufferMemoryBarrier2-srcQueueFamilyIndex-04087", - "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least one must not be a special queue family reserved for external memory ownership transfers, as described in Queue Family Ownership Transfer" + "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least one of srcQueueFamilyIndex or dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL or VK_QUEUE_FAMILY_FOREIGN_EXT" }, { - "vuid": "VUID-VkBufferMemoryBarrier2-buffer-04088", - "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is one of the special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" + "vuid": "VUID-VkBufferMemoryBarrier2-None-09097", + "text": " If the VK_KHR_external_memory extension is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is not greater than or equal to Version 1.1, srcQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL" }, { - "vuid": "VUID-VkBufferMemoryBarrier2-buffer-04089", - "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in Queue Family Ownership Transfer" + "vuid": "VUID-VkBufferMemoryBarrier2-None-09098", + "text": " If the VK_KHR_external_memory extension is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is not greater than or equal to Version 1.1, dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2-srcQueueFamilyIndex-09099", + "text": " If the VK_EXT_queue_family_foreign extension is not enabled srcQueueFamilyIndex must not be VK_QUEUE_FAMILY_FOREIGN_EXT" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier2-dstQueueFamilyIndex-09100", + "text": " If the VK_EXT_queue_family_foreign extension is not enabled dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_FOREIGN_EXT" }, { "vuid": "VUID-VkBufferMemoryBarrier2-srcStageMask-03851", @@ -5456,22 +5472,46 @@ "vuid": "VUID-VkBufferMemoryBarrier-buffer-01931", "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-09095", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex must be VK_QUEUE_FAMILY_EXTERNAL, VK_QUEUE_FAMILY_FOREIGN_EXT, or a valid queue family" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-buffer-09096", + "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, dstQueueFamilyIndex must be VK_QUEUE_FAMILY_EXTERNAL, VK_QUEUE_FAMILY_FOREIGN_EXT, or a valid queue family" + }, { "vuid": "VUID-VkBufferMemoryBarrier-srcQueueFamilyIndex-04087", - "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least one must not be a special queue family reserved for external memory ownership transfers, as described in Queue Family Ownership Transfer" + "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least 
one of srcQueueFamilyIndex or dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL or VK_QUEUE_FAMILY_FOREIGN_EXT" }, { - "vuid": "VUID-VkBufferMemoryBarrier-buffer-04088", - "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is one of the special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" + "vuid": "VUID-VkBufferMemoryBarrier-None-09097", + "text": " If the VK_KHR_external_memory extension is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is not greater than or equal to Version 1.1, srcQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL" }, { - "vuid": "VUID-VkBufferMemoryBarrier-buffer-04089", - "text": " If buffer was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in Queue Family Ownership Transfer" + "vuid": "VUID-VkBufferMemoryBarrier-None-09098", + "text": " If the VK_KHR_external_memory extension is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is not greater than or equal to Version 1.1, dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL" }, { - "vuid": "VUID-VkBufferMemoryBarrier-synchronization2-03853", + "vuid": "VUID-VkBufferMemoryBarrier-srcQueueFamilyIndex-09099", + "text": " If the VK_EXT_queue_family_foreign extension is not enabled srcQueueFamilyIndex must not be VK_QUEUE_FAMILY_FOREIGN_EXT" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-dstQueueFamilyIndex-09100", + "text": " If the VK_EXT_queue_family_foreign extension is not enabled dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_FOREIGN_EXT" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-None-09049", "text": " If the synchronization2 feature is not enabled, and buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED" }, + { + "vuid": "VUID-VkBufferMemoryBarrier-None-09050", + "text": " If the synchronization2 feature is not enabled, and buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED or VK_QUEUE_FAMILY_EXTERNAL" + }, + { + "vuid": "VUID-VkBufferMemoryBarrier-None-09051", + "text": " If the synchronization2 feature is not enabled, and buffer was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED or VK_QUEUE_FAMILY_EXTERNAL" + }, { "vuid": "VUID-VkBufferMemoryBarrier-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER" @@ -5984,17 +6024,33 @@ "vuid": "VUID-VkImageMemoryBarrier2-oldLayout-02088", "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR then image must have been created with VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR set" }, + { + "vuid": "VUID-VkImageMemoryBarrier2-image-09117", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and 
dstQueueFamilyIndex are not equal, srcQueueFamilyIndex must be VK_QUEUE_FAMILY_EXTERNAL, VK_QUEUE_FAMILY_FOREIGN_EXT, or a valid queue family" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2-image-09118", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, dstQueueFamilyIndex must be VK_QUEUE_FAMILY_EXTERNAL, VK_QUEUE_FAMILY_FOREIGN_EXT, or a valid queue family" + }, { "vuid": "VUID-VkImageMemoryBarrier2-srcQueueFamilyIndex-04070", - "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least one must not be a special queue family reserved for external memory ownership transfers, as described in Queue Family Ownership Transfer" + "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least one of srcQueueFamilyIndex or dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL or VK_QUEUE_FAMILY_FOREIGN_EXT" }, { - "vuid": "VUID-VkImageMemoryBarrier2-image-04071", - "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is one of the special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" + "vuid": "VUID-VkImageMemoryBarrier2-None-09119", + "text": " If the VK_KHR_external_memory extension is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is not greater than or equal to Version 1.1, srcQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL" }, { - "vuid": "VUID-VkImageMemoryBarrier2-image-04072", - "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in Queue Family Ownership Transfer" + "vuid": "VUID-VkImageMemoryBarrier2-None-09120", + "text": " If the VK_KHR_external_memory extension is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is not greater than or equal to Version 1.1, dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2-srcQueueFamilyIndex-09121", + "text": " If the VK_EXT_queue_family_foreign extension is not enabled srcQueueFamilyIndex must not be VK_QUEUE_FAMILY_FOREIGN_EXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier2-dstQueueFamilyIndex-09122", + "text": " If the VK_EXT_queue_family_foreign extension is not enabled dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_FOREIGN_EXT" }, { "vuid": "VUID-VkImageMemoryBarrier2-srcQueueFamilyIndex-07120", @@ -6204,17 +6260,33 @@ "vuid": "VUID-VkImageMemoryBarrier-oldLayout-02088", "text": " If srcQueueFamilyIndex and dstQueueFamilyIndex define a queue family ownership transfer or oldLayout and newLayout define an image layout transition, and oldLayout or newLayout is VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR then image must have been created with VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR set" }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-09117", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex must be VK_QUEUE_FAMILY_EXTERNAL, VK_QUEUE_FAMILY_FOREIGN_EXT, or a valid 
queue family" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-image-09118", + "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, dstQueueFamilyIndex must be VK_QUEUE_FAMILY_EXTERNAL, VK_QUEUE_FAMILY_FOREIGN_EXT, or a valid queue family" + }, { "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-04070", - "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least one must not be a special queue family reserved for external memory ownership transfers, as described in Queue Family Ownership Transfer" + "text": " If srcQueueFamilyIndex is not equal to dstQueueFamilyIndex, at least one of srcQueueFamilyIndex or dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL or VK_QUEUE_FAMILY_FOREIGN_EXT" }, { - "vuid": "VUID-VkImageMemoryBarrier-image-04071", - "text": " If image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, and one of srcQueueFamilyIndex and dstQueueFamilyIndex is one of the special queue family values reserved for external memory transfers, the other must be VK_QUEUE_FAMILY_IGNORED" + "vuid": "VUID-VkImageMemoryBarrier-None-09119", + "text": " If the VK_KHR_external_memory extension is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is not greater than or equal to Version 1.1, srcQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL" }, { - "vuid": "VUID-VkImageMemoryBarrier-image-04072", - "text": " If image was created with a sharing mode of VK_SHARING_MODE_EXCLUSIVE, and srcQueueFamilyIndex and dstQueueFamilyIndex are not equal, srcQueueFamilyIndex and dstQueueFamilyIndex must both be valid queue families, or one of the special queue family values reserved for external memory transfers, as described in Queue Family Ownership Transfer" + "vuid": "VUID-VkImageMemoryBarrier-None-09120", + "text": " If the VK_KHR_external_memory extension is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is not greater than or equal to Version 1.1, dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_EXTERNAL" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-09121", + "text": " If the VK_EXT_queue_family_foreign extension is not enabled srcQueueFamilyIndex must not be VK_QUEUE_FAMILY_FOREIGN_EXT" + }, + { + "vuid": "VUID-VkImageMemoryBarrier-dstQueueFamilyIndex-09122", + "text": " If the VK_EXT_queue_family_foreign extension is not enabled dstQueueFamilyIndex must not be VK_QUEUE_FAMILY_FOREIGN_EXT" }, { "vuid": "VUID-VkImageMemoryBarrier-srcQueueFamilyIndex-07120", @@ -6293,9 +6365,17 @@ "text": " If the aspectMask member of subresourceRange includes VK_IMAGE_ASPECT_STENCIL_BIT, oldLayout and newLayout must not be one of VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL" }, { - "vuid": "VUID-VkImageMemoryBarrier-synchronization2-03857", + "vuid": "VUID-VkImageMemoryBarrier-None-09052", "text": " If the synchronization2 feature is not enabled, and image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, at least one of srcQueueFamilyIndex and dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED" }, + { + "vuid": "VUID-VkImageMemoryBarrier-None-09053", + "text": " If the synchronization2 feature is not enabled, and image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, srcQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED or VK_QUEUE_FAMILY_EXTERNAL" + }, + { + 
"vuid": "VUID-VkImageMemoryBarrier-None-09054", + "text": " If the synchronization2 feature is not enabled, and image was created with a sharing mode of VK_SHARING_MODE_CONCURRENT, dstQueueFamilyIndex must be VK_QUEUE_FAMILY_IGNORED or VK_QUEUE_FAMILY_EXTERNAL" + }, { "vuid": "VUID-VkImageMemoryBarrier-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER" @@ -6326,6 +6406,106 @@ } ] }, + "vkTransitionImageLayoutEXT": { + "core": [ + { + "vuid": "VUID-vkTransitionImageLayoutEXT-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkTransitionImageLayoutEXT-pTransitions-parameter", + "text": " pTransitions must be a valid pointer to an array of transitionCount valid VkHostImageLayoutTransitionInfoEXT structures" + }, + { + "vuid": "VUID-vkTransitionImageLayoutEXT-transitionCount-arraylength", + "text": " transitionCount must be greater than 0" + } + ] + }, + "VkHostImageLayoutTransitionInfoEXT": { + "core": [ + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-image-09055", + "text": " image must have been created with VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-subresourceRange-01486", + "text": " subresourceRange.baseMipLevel must be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-subresourceRange-01724", + "text": " If subresourceRange.levelCount is not VK_REMAINING_MIP_LEVELS, subresourceRange.baseMipLevel + subresourceRange.levelCount must be less than or equal to the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-subresourceRange-01488", + "text": " subresourceRange.baseArrayLayer must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-subresourceRange-01725", + "text": " If subresourceRange.layerCount is not VK_REMAINING_ARRAY_LAYERS, subresourceRange.baseArrayLayer + subresourceRange.layerCount must be less than or equal to the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-image-01932", + "text": " If image is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-image-01671", + "text": " If image has a single-plane color format or is not disjoint, then the aspectMask member of subresourceRange must be VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-image-01672", + "text": " If image has a multi-planar format and the image is disjoint, then the aspectMask member of subresourceRange must include at least one multi-planar aspect mask or VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-image-03319", + "text": " If image has a depth/stencil format with both depth and stencil and the separateDepthStencilLayouts feature is enabled, then the aspectMask member of subresourceRange must include either or both VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-image-03320", + "text": " If image has a depth/stencil format with both depth and stencil and the separateDepthStencilLayouts feature is not enabled, then the aspectMask member of subresourceRange must include both 
VK_IMAGE_ASPECT_DEPTH_BIT and VK_IMAGE_ASPECT_STENCIL_BIT" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-aspectMask-08702", + "text": " If the aspectMask member of subresourceRange includes VK_IMAGE_ASPECT_DEPTH_BIT, oldLayout and newLayout must not be one of VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-aspectMask-08703", + "text": " If the aspectMask member of subresourceRange includes VK_IMAGE_ASPECT_STENCIL_BIT, oldLayout and newLayout must not be one of VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-oldLayout-09056", + "text": " oldLayout must be either VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PREINITIALIZED, or one of the layouts in VkPhysicalDeviceHostImageCopyPropertiesEXT::pCopySrcLayouts" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-newLayout-09057", + "text": " newLayout must be one of the layouts in VkPhysicalDeviceHostImageCopyPropertiesEXT::pCopyDstLayouts" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-image-parameter", + "text": " image must be a valid VkImage handle" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-oldLayout-parameter", + "text": " oldLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-newLayout-parameter", + "text": " newLayout must be a valid VkImageLayout value" + }, + { + "vuid": "VUID-VkHostImageLayoutTransitionInfoEXT-subresourceRange-parameter", + "text": " subresourceRange must be a valid VkImageSubresourceRange structure" + } + ] + }, "VkExternalMemoryAcquireUnmodifiedEXT": { "core": [ { @@ -6688,6 +6868,10 @@ "vuid": "VUID-VkRenderingInfo-perViewRenderAreaCount-07858", "text": " If the perViewRenderAreaCount member of a VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM structure included in the pNext chain is not 0, then renderArea must specify a render area that includes the union of all per view render areas." 
}, + { + "vuid": "VUID-VkRenderingInfo-None-09044", + "text": " Valid attachments specified by this structure must not be bound to memory locations that are bound to any other valid attachments specified by this structure" + }, { "vuid": "VUID-VkRenderingInfo-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_RENDERING_INFO" @@ -8182,7 +8366,7 @@ }, { "vuid": "VUID-VkSubpassDescription2-attachment-06251", - "text": " If the attachment member of pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and its pNext chain includes a VkAttachmentDescriptionStencilLayout structure, the layout member of pDepthStencilAttachment must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL" + "text": " If the attachment member of pDepthStencilAttachment is not VK_ATTACHMENT_UNUSED and its pNext chain includes a VkAttachmentReferenceStencilLayout structure, the layout member of pDepthStencilAttachment must not be VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL or VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL" }, { "vuid": "VUID-VkSubpassDescription2-pipelineBindPoint-04953", @@ -9128,6 +9312,10 @@ "vuid": "VUID-vkCmdBeginRenderPass-framebuffer-02532", "text": " For any attachment in framebuffer that is used by renderPass and is bound to memory locations that are also bound to another attachment used by renderPass, and if at least one of those uses causes either attachment to be written to, both attachments must have had the VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT set" }, + { + "vuid": "VUID-vkCmdBeginRenderPass-framebuffer-09045", + "text": " If any attachments specified in framebuffer are used by renderPass and are bound to overlapping memory locations, there must be only one that is used as a color attachment, depth/stencil, or resolve attachment in any subpass" + }, { "vuid": "VUID-vkCmdBeginRenderPass-initialLayout-07000", "text": " If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass member of pRenderPassBegin is VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including either the VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT or VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT and either the VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT or VK_IMAGE_USAGE_SAMPLED_BIT usage bits" @@ -9220,6 +9408,10 @@ "vuid": "VUID-vkCmdBeginRenderPass2-framebuffer-02533", "text": " For any attachment in framebuffer that is used by renderPass and is bound to memory locations that are also bound to another attachment used by renderPass, and if at least one of those uses causes either attachment to be written to, both attachments must have had the VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT set" }, + { + "vuid": "VUID-vkCmdBeginRenderPass2-framebuffer-09046", + "text": " If any attachments specified in framebuffer are used by renderPass and are bound to overlapping memory locations, there must be only one that is used as a color attachment, depth/stencil, or resolve attachment in any subpass" + }, { "vuid": "VUID-vkCmdBeginRenderPass2-initialLayout-07002", "text": " If any of the initialLayout or finalLayout member of the VkAttachmentDescription structures or the layout member of the VkAttachmentReference structures specified when creating the render pass specified in the renderPass 
member of pRenderPassBegin is VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT then the corresponding attachment image view of the framebuffer specified in the framebuffer member of pRenderPassBegin must have been created with a usage value including either the VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT or VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT and either the VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT or VK_IMAGE_USAGE_SAMPLED_BIT usage bits" @@ -9353,8 +9545,8 @@ "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageViewCreateInfo::format equal to the corresponding value of VkAttachmentDescription::format in renderPass" }, { - "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-03217", - "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::samples equal to the corresponding value of VkAttachmentDescription::samples in renderPass" + "vuid": "VUID-VkRenderPassBeginInfo-framebuffer-09047", + "text": " If framebuffer was created with a VkFramebufferCreateInfo::flags value that included VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, each element of the pAttachments member of a VkRenderPassAttachmentBeginInfo structure included in the pNext chain must be a VkImageView of an image created with a value of VkImageCreateInfo::samples equal to the corresponding value of VkAttachmentDescription::samples in renderPass , or VK_SAMPLE_COUNT_1_BIT if renderPass was created with VkMultisampledRenderToSingleSampledInfoEXT structure in the pNext chain with multisampledRenderToSingleSampledEnable equal to VK_TRUE" }, { "vuid": "VUID-VkRenderPassBeginInfo-pNext-02869", @@ -10965,8 +11157,12 @@ "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV" }, { - "vuid": "VUID-VkComputePipelineCreateInfo-flags-02874", - "text": " flags must not include VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + "vuid": "VUID-VkComputePipelineCreateInfo-flags-09007", + "text": " If flags includes VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then the VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputePipelines feature must be enabled" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-09008", + "text": " If flags includes VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then the pNext chain must include a pointer to a valid instance of VkComputePipelineIndirectBufferInfoNV specifying the address where the pipeline’s metadata will be saved" }, { "vuid": "VUID-VkComputePipelineCreateInfo-pipelineCreationCacheControl-02875", @@ -11326,6 +11522,106 @@ } ] }, + "VkComputePipelineIndirectBufferInfoNV": { + "core": [ + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-deviceGeneratedComputePipelines-09009", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputePipelines feature must be enabled" + }, + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-flags-09010", + "text": " The pipeline creation flags in VkComputePipelineCreateInfo::flags must include VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + }, + { + "vuid": 
"VUID-VkComputePipelineIndirectBufferInfoNV-deviceAddress-09011", + "text": " deviceAddress must be aligned to the VkMemoryRequirements2::alignment, as returned by vkGetPipelineIndirectMemoryRequirementsNV" + }, + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-deviceAddress-09012", + "text": " deviceAddress must have been allocated from a buffer that was created with usage VK_BUFFER_USAGE_TRANSFER_DST_BIT and VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT" + }, + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-size-09013", + "text": " size must be greater than or equal to the VkMemoryRequirements2::size, as returned by vkGetPipelineIndirectMemoryRequirementsNV" + }, + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-pipelineDeviceAddressCaptureReplay-09014", + "text": " If pipelineDeviceAddressCaptureReplay is non-zero then the VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputeCaptureReplay feature must be enabled" + }, + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-pipelineDeviceAddressCaptureReplay-09015", + "text": " If pipelineDeviceAddressCaptureReplay is non-zero then that address must have been allocated with flag VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT set" + }, + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-pipelineDeviceAddressCaptureReplay-09016", + "text": " If pipelineDeviceAddressCaptureReplay is non-zero, the pipeline must have been recreated for replay" + }, + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-pipelineDeviceAddressCaptureReplay-09017", + "text": " pipelineDeviceAddressCaptureReplay must satisfy the alignment and size requirements similar to deviceAddress" + }, + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV" + }, + { + "vuid": "VUID-VkComputePipelineIndirectBufferInfoNV-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, + "vkCmdUpdatePipelineIndirectBuffer": { + "core": [ + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipelineBindPoint-09018", + "text": " pipelineBindPoint must be VK_PIPELINE_BIND_POINT_COMPUTE" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipeline-09019", + "text": " pipeline must have been created with VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV flag set" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipeline-09020", + "text": " pipeline must have been created with VkComputePipelineIndirectBufferInfoNV structure specifying a valid address where its metadata will be saved" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-deviceGeneratedComputePipelines-09021", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputePipelines feature must be enabled" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer 
was allocated from must support transfer, graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-videocoding", + "text": " This command must only be called outside of a video coding scope" + }, + { + "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-commonparent", + "text": " Both of commandBuffer, and pipeline must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, "vkCreateGraphicsPipelines": { "core": [ { @@ -11421,8 +11717,12 @@ "text": " If the pipeline requires pre-rasterization shader state and pStages includes a tessellation evaluation shader stage, it must include a tessellation control shader stage" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00731", - "text": " If the pipeline requires pre-rasterization shader state and pStages includes a tessellation control shader stage and a tessellation evaluation shader stage, pTessellationState must be a valid pointer to a valid VkPipelineTessellationStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-09022", + "text": " If the pipeline requires pre-rasterization shader state and pStages includes a tessellation control shader stage, and the VK_EXT_extended_dynamic_state3 extension is not enabled or the VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT dynamic state is not set, pTessellationState must be a valid pointer to a valid VkPipelineTessellationStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pTessellationState-09023", + "text": " If pTessellationState is not NULL it must be a pointer to a valid VkPipelineTessellationStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-00732", @@ -11517,28 +11817,40 @@ "text": " If the pipeline requires pre-rasterization shader state, and the wideLines feature is not enabled, and no element of the pDynamicStates member of pDynamicState is VK_DYNAMIC_STATE_LINE_WIDTH, the lineWidth member of pRasterizationState must be 1.0" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00750", - "text": " If the pipeline requires pre-rasterization shader state, and the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, pViewportState must be a valid pointer to a valid VkPipelineViewportStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-09024", + "text": " If the pipeline requires pre-rasterization shader state, and the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE dynamic state is enabled or the rasterizerDiscardEnable member of pRasterizationState is VK_FALSE, and either the VK_EXT_extended_dynamic_state3 extension is not enabled, or either the VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT or VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT dynamic states are not set, pViewportState must be a valid pointer to a valid VkPipelineViewportStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pViewportState-09025", + "text": " If pViewportState is not NULL it must be a valid pointer to a valid VkPipelineViewportStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pViewportState-04892", "text": " If the pipeline requires pre-rasterization shader state, and the graphics pipeline state was created with the VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE dynamic state enabled, pViewportState must be a valid 
pointer to a valid VkPipelineViewportStateCreateInfo structure" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-rasterizerDiscardEnable-00751", - "text": " If the pipeline requires fragment output interface state, pMultisampleState must be a valid pointer to a valid VkPipelineMultisampleStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pMultisampleState-09026", + "text": " If the pipeline requires fragment output interface state, and the VK_EXT_extended_dynamic_state3 extension is not enabled or any of the VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT, VK_DYNAMIC_STATE_SAMPLE_MASK_EXT, VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT, or VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT dynamic states is not set, pMultisampleState must be a valid pointer to a valid VkPipelineMultisampleStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pMultisampleState-09027", + "text": " If pMultisampleState is not NULL is must be a valid pointer to a valid VkPipelineMultisampleStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-alphaToCoverageEnable-08891", "text": " If the pipeline is being created with fragment shader state, the VkPipelineMultisampleStateCreateInfo::alphaToCoverageEnable is not ignored and is VK_TRUE, then the Fragment Output Interface must contain a variable for the alpha Component word in Location 0 at Index 0" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-06043", - "text": " If renderPass is not VK_NULL_HANDLE, the pipeline is being created with fragment shader state, and subpass uses a depth/stencil attachment, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-09028", + "text": " If renderPass is not VK_NULL_HANDLE, the pipeline is being created with fragment shader state, and subpass uses a depth/stencil attachment, and the VK_EXT_extended_dynamic_state3 extension is not enabled or, any of the VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, VK_DYNAMIC_STATE_STENCIL_OP, or VK_DYNAMIC_STATE_DEPTH_BOUNDS dynamic states are not set, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-06044", - "text": " If renderPass is not VK_NULL_HANDLE, the pipeline is being created with fragment output interface state, and subpass uses color attachments, pColorBlendState must be a valid pointer to a valid VkPipelineColorBlendStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDepthStencilState-09029", + "text": " If pDepthStencilState is not NULL it must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-09030", + "text": " If renderPass is not VK_NULL_HANDLE, the pipeline is being created with fragment output interface state, and subpass uses color attachments, and VK_EXT_extended_dynamic_state3 extension is not enabled, or any of the VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT, VK_DYNAMIC_STATE_LOGIC_OP_EXT, VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT, VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT, VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, or VK_DYNAMIC_STATE_BLEND_CONSTANTS dynamic states are not set, pColorBlendState must be a valid pointer to a valid 
VkPipelineColorBlendStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-00754", @@ -11681,8 +11993,12 @@ "text": " If the pipeline is being created with vertex input state and pVertexInputState is not dynamic, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription::format must have a 64-bit component" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02098", - "text": " If the pipeline requires vertex input state, pInputAssemblyState must be a valid pointer to a valid VkPipelineInputAssemblyStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-dynamicPrimitiveTopologyUnrestricted-09031", + "text": " If the pipeline requires vertex input state, and the VK_EXT_extended_dynamic_state3 extension is not enabled, or either VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, or VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY dynamic states are not set, or dynamicPrimitiveTopologyUnrestricted is VK_FALSE, pInputAssemblyState must be a valid pointer to a valid VkPipelineInputAssemblyStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pInputAssemblyState-09032", + "text": " If pInputAssemblyState is not NULL it must be a valid pointer to a valid VkPipelineInputAssemblyStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-02317", @@ -11748,6 +12064,10 @@ "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-04947", "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV" }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03378", + "text": " If the extendedDynamicState feature is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is less than Version 1.3 there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_CULL_MODE, VK_DYNAMIC_STATE_FRONT_FACE, VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT, VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT, VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE, VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, or VK_DYNAMIC_STATE_STENCIL_OP" + }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-03379", "text": " If the pipeline requires pre-rasterization shader state, and VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT is included in the pDynamicStates array then viewportCount must be zero" @@ -11768,6 +12088,10 @@ "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-07065", "text": " If the pipeline requires pre-rasterization shader state, and includes a mesh shader, there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, or VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE" }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04868", + "text": " If the extendedDynamicState2 feature is not enabled, and the value of VkApplicationInfo::apiVersion used to create the VkInstance is less than Version 1.3 there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, or VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE" + }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDynamicStates-04869", "text": " If the 
extendedDynamicState2LogicOp feature is not enabled, there must be no element of the pDynamicStates member of pDynamicState set to VK_DYNAMIC_STATE_LOGIC_OP_EXT" @@ -11788,6 +12112,14 @@ "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-02966", "text": " If the pipeline requires pre-rasterization shader state and flags includes VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then all stages must not specify Xfb execution mode" }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-libraryCount-06648", + "text": " If the pipeline is not created with a complete set of state, or VkPipelineLibraryCreateInfoKHR::libraryCount is not 0, VkGraphicsPipelineShaderGroupsCreateInfoNV::groupCount and VkGraphicsPipelineShaderGroupsCreateInfoNV::pipelineCount must be 0" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-libraryCount-06649", + "text": " If the pipeline is created with a complete set of state, and VkPipelineLibraryCreateInfoKHR::libraryCount is 0, and the pNext chain includes an instance of VkGraphicsPipelineShaderGroupsCreateInfoNV, VkGraphicsPipelineShaderGroupsCreateInfoNV::groupCount must be greater than 0" + }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pipelineCreationCacheControl-02878", "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT" @@ -11993,16 +12325,28 @@ "text": " If the pipeline requires fragment output interface state, renderPass is VK_NULL_HANDLE, VkPipelineRenderingCreateInfo::depthAttachmentFormat is not VK_FORMAT_UNDEFINED, and VkPipelineRenderingCreateInfo::stencilAttachmentFormat is not VK_FORMAT_UNDEFINED, depthAttachmentFormat must equal stencilAttachmentFormat" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-06053", - "text": " If renderPass is VK_NULL_HANDLE, the pipeline is being created with fragment shader state and fragment output interface state, and either of VkPipelineRenderingCreateInfo::depthAttachmentFormat or VkPipelineRenderingCreateInfo::stencilAttachmentFormat are not VK_FORMAT_UNDEFINED, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-09033", + "text": " If renderPass is VK_NULL_HANDLE, the pipeline is being created with fragment shader state and fragment output interface state, and either of VkPipelineRenderingCreateInfo::depthAttachmentFormat or VkPipelineRenderingCreateInfo::stencilAttachmentFormat are not VK_FORMAT_UNDEFINED, and the VK_EXT_extended_dynamic_state3 extension is not enabled or any of the VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, VK_DYNAMIC_STATE_STENCIL_OP, or VK_DYNAMIC_STATE_DEPTH_BOUNDS dynamic states are not set, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-06590", - "text": " If renderPass is VK_NULL_HANDLE and the pipeline is being created with fragment shader state but not fragment output interface state, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDepthStencilState-09034", + "text": " If pDepthStencilState is not NULL it must be a valid pointer to a valid 
VkPipelineDepthStencilStateCreateInfo structure" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-06054", - "text": " If renderPass is VK_NULL_HANDLE, the pipeline is being created with fragment output interface state, and VkPipelineRenderingCreateInfo::colorAttachmentCount is not equal to 0, pColorBlendState must be a valid pointer to a valid VkPipelineColorBlendStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-09035", + "text": " If renderPass is VK_NULL_HANDLE and the pipeline is being created with fragment shader state but not fragment output interface state, and the VK_EXT_extended_dynamic_state3 extension is not enabled, or any of the VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, VK_DYNAMIC_STATE_STENCIL_OP, or VK_DYNAMIC_STATE_DEPTH_BOUNDS dynamic states are not set, pDepthStencilState must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pDepthStencilState-09036", + "text": " If pDepthStencilState is not NULL it must be a valid pointer to a valid VkPipelineDepthStencilStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-09037", + "text": " If renderPass is VK_NULL_HANDLE, the pipeline is being created with fragment output interface state, and VkPipelineRenderingCreateInfo::colorAttachmentCount is not equal to 0, and the VK_EXT_extended_dynamic_state3 extension is not enabled, or any of the VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT, VK_DYNAMIC_STATE_LOGIC_OP_EXT, VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT, VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT, VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, or VK_DYNAMIC_STATE_BLEND_CONSTANTS dynamic states are not set, pColorBlendState must be a valid pointer to a valid VkPipelineColorBlendStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pColorBlendState-09038", + "text": " If pColorBlendState is not NULL it must be a valid pointer to a valid VkPipelineColorBlendStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-06055", @@ -12104,6 +12448,30 @@ "vuid": "VUID-VkGraphicsPipelineCreateInfo-pLibraries-06599", "text": " If one element of VkPipelineLibraryCreateInfoKHR::pLibraries includes VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT and another element includes VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT, the value of VkMultiviewPerViewAttributesInfoNVX::perViewAttributesPositionXOnly specified in both libraries must be equal" }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pStages-06600", + "text": " If the pipeline requires pre-rasterization shader state or fragment shader state, pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-09039", + "text": " If the VK_EXT_extended_dynamic_state3 extension is not enabled, or any of the VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT, VK_DYNAMIC_STATE_SAMPLE_MASK_EXT, VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT, or VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT dynamic states are not set then pRasterizationState must be a valid pointer to a valid VkPipelineRasterizationStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-09040", + "text": " If 
pRasterizationState is not NULL it must be a valid pointer to a valid VkPipelineRasterizationStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-layout-06602", + "text": " If the pipeline requires fragment shader state or pre-rasterization shader state, layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-renderPass-06603", + "text": " If pre-rasterization shader state, fragment shader state, or fragment output state, and renderPass is not VK_NULL_HANDLE, renderPass must be a valid VkRenderPass handle" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-stageCount-06604", + "text": " If the pipeline requires pre-rasterization shader state or fragment shader state, stageCount must be greater than 0" + }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-graphicsPipelineLibrary-06606", "text": " If the graphicsPipelineLibrary feature is not enabled, flags must not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" @@ -12269,8 +12637,12 @@ "text": " If VkGraphicsPipelineLibraryCreateInfoEXT::flags includes VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT or VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT, pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-06641", - "text": " If VkGraphicsPipelineLibraryCreateInfoEXT::flags includes VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT, pRasterizationState must be a valid pointer to a valid VkPipelineRasterizationStateCreateInfo structure" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-09041", + "text": " If VkGraphicsPipelineLibraryCreateInfoEXT::flags includes VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT, and the VK_EXT_extended_dynamic_state3 extension is not enabled, or any of the VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT, VK_DYNAMIC_STATE_SAMPLE_MASK_EXT, VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT, or VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT dynamic states are not set, pRasterizationState must be a valid pointer to a valid VkPipelineRasterizationStateCreateInfo structure" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pRasterizationState-09042", + "text": " If pRasterizationState is not NULL it must be a valid pointer to a valid VkPipelineRasterizationStateCreateInfo structure" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-06642", @@ -12296,14 +12668,6 @@ "vuid": "VUID-VkGraphicsPipelineCreateInfo-pLibraries-06647", "text": " If VkPipelineLibraryCreateInfoKHR::pLibraries includes at least one library, VkGraphicsPipelineLibraryCreateInfoEXT::flags is non-zero, and any library was created with VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, flags must include VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR" }, - { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-libraryCount-06648", - "text": " If the pipeline is not created with a complete set of state, or VkPipelineLibraryCreateInfoKHR::libraryCount is not 0, VkGraphicsPipelineShaderGroupsCreateInfoNV::groupCount and VkGraphicsPipelineShaderGroupsCreateInfoNV::pipelineCount must be 0" - }, - { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-libraryCount-06649", - "text": " If the pipeline is created with a complete set of state, VkPipelineLibraryCreateInfoKHR::libraryCount is 0, and the pNext chain includes an instance of VkGraphicsPipelineShaderGroupsCreateInfoNV, VkGraphicsPipelineShaderGroupsCreateInfoNV::groupCount must 
be greater than 0" - }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-None-07826", "text": " If the pipeline includes a complete set of state, VkPipelineLayout must be a valid pipeline layout" @@ -12536,6 +12900,10 @@ "vuid": "VUID-VkGraphicsPipelineCreateInfo-flags-08907", "text": " If VkGraphicsPipelineLibraryCreateInfoEXT::flags includes VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT, and pre-rasterization shader state is not specified, the pipeline must define fragment output interface state" }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-None-09043", + "text": " If pDynamicState->pDynamicStates does not include VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the colorWriteMask member of the corresponding element of pColorBlendState->pAttachments must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO" @@ -16860,6 +17228,10 @@ "vuid": "VUID-VkImageCreateInfo-pNext-06786", "text": " If the pNext chain includes a VkImportMetalTextureInfoEXT structure and the image has a multi-planar format with only two planes, then VkImportMetalTextureInfoEXT::plane must not be VK_IMAGE_ASPECT_PLANE_2_BIT" }, + { + "vuid": "VUID-VkImageCreateInfo-imageCreateFormatFeatures-09048", + "text": " If imageCreateFormatFeatures (as defined in Image Creation Limits) does not contain VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT, then usage must not contain VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT" + }, { "vuid": "VUID-VkImageCreateInfo-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO" @@ -17282,7 +17654,7 @@ }, { "vuid": "VUID-VkSubresourceLayout2EXT-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkImageCompressionPropertiesEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkImageCompressionPropertiesEXT or VkSubresourceHostMemcpySizeEXT" }, { "vuid": "VUID-VkSubresourceLayout2EXT-sType-unique", @@ -17290,6 +17662,14 @@ } ] }, + "VkSubresourceHostMemcpySizeEXT": { + "core": [ + { + "vuid": "VUID-VkSubresourceHostMemcpySizeEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT" + } + ] + }, "vkGetImageDrmFormatModifierPropertiesEXT": { "core": [ { @@ -21770,11 +22150,11 @@ }, { "vuid": "VUID-VkDescriptorImageInfo-descriptorType-06713", - "text": " If the image2DViewOf3D feature is not enabled and descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_IMAGE then imageView must not be a 2D view created from a 3D image" + "text": " If the image2DViewOf3D feature is not enabled or descriptorType is not VK_DESCRIPTOR_TYPE_STORAGE_IMAGE then imageView must not be a 2D view created from a 3D image" }, { "vuid": "VUID-VkDescriptorImageInfo-descriptorType-06714", - "text": " If the sampler2DViewOf3D feature is not enabled and descriptorType is VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER then imageView must not be a 2D view created from a 3D image" + "text": " If the sampler2DViewOf3D feature is not enabled or descriptorType is not VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE or VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER then imageView must not be a 2D view created from a 3D image" }, { "vuid": "VUID-VkDescriptorImageInfo-imageView-01976", @@ 
-22706,11 +23086,11 @@ }, { "vuid": "VUID-VkDescriptorAddressInfoEXT-nullDescriptor-08938", - "text": " If the nullDescriptor feature is enabled, and address is zero, range must be VK_WHOLE_SIZE" + "text": " If address is zero, range must be VK_WHOLE_SIZE" }, { "vuid": "VUID-VkDescriptorAddressInfoEXT-nullDescriptor-08939", - "text": " If the nullDescriptor feature is enabled and address is not zero, range must not be VK_WHOLE_SIZE" + "text": " If address is not zero, range must not be VK_WHOLE_SIZE" }, { "vuid": "VUID-VkDescriptorAddressInfoEXT-None-08044", @@ -22900,6 +23280,10 @@ "vuid": "VUID-vkCmdSetDescriptorBufferOffsetsEXT-pipelineBindPoint-08067", "text": " pipelineBindPoint must be supported by the commandBuffer’s parent VkCommandPool’s queue family" }, + { + "vuid": "VUID-vkCmdSetDescriptorBufferOffsetsEXT-firstSet-09006", + "text": " The VkDescriptorSetLayout for each set from firstSet to firstSet + setCount when layout was created must have been created with the VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT bit set" + }, { "vuid": "VUID-vkCmdSetDescriptorBufferOffsetsEXT-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -28018,11 +28402,11 @@ }, { "vuid": "VUID-vkCmdCopyBufferToImage-imageSubresource-07971", - "text": " For each element of pRegions, imageOffset.x and (imageExtent.width + imageOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified pname:imageSubresource of pname:dstImage" + "text": " For each element of pRegions, pname:imageOffset.x and (pname:imageExtent.width + pname:imageOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified pname:imageSubresource of pname:dstImage" }, { "vuid": "VUID-vkCmdCopyBufferToImage-imageSubresource-07972", - "text": " For each element of pRegions, imageOffset.y and (imageExtent.height + imageOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified pname:imageSubresource of pname:dstImage" + "text": " For each element of pRegions, pname:imageOffset.y and (pname:imageExtent.height + pname:imageOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified pname:imageSubresource of pname:dstImage" }, { "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07973", @@ -28052,82 +28436,6 @@ "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-07739", "text": " If the queue family used to create the VkCommandPool which commandBuffer was allocated from does not support VK_QUEUE_GRAPHICS_BIT, for each element of pname:pRegions, the aspectMask member of imageSubresource must not be VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT" }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07979", - "text": " If pname:dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, imageOffset.y must be 0 and imageExtent.height must be 1" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-imageOffset-00200", - "text": " For each element of pRegions, imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified imageSubresource of pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07980", - "text": " If pname:dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, imageOffset.z must be 0 and imageExtent.depth must be 1" - }, - { - "vuid": 
"VUID-vkCmdCopyBufferToImage-bufferRowLength-00203", - "text": " For each element of pRegions, bufferRowLength must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-bufferImageHeight-00204", - "text": " For each element of pRegions, bufferImageHeight must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-07274", - "text": " For each element of pRegions, imageOffset.x must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-07275", - "text": " For each element of pRegions, imageOffset.y must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-07276", - "text": " For each element of pRegions, imageOffset.z must be a multiple of the texel block extent depth of the VkFormat of pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-imageExtent-00207", - "text": " For each element of pRegions, if the sum of imageOffset.x and extent.width does not equal the width of the subresource specified by srcSubresource, extent.width must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-imageExtent-00208", - "text": " For each element of pRegions, if the sum of imageOffset.y and extent.height does not equal the height of the subresource specified by srcSubresource, extent.height must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-imageExtent-00209", - "text": " For each element of pRegions, if the sum of imageOffset.z and extent.depth does not equal the depth of the subresource specified by srcSubresource, extent.depth must be a multiple of the texel block extent depth of the VkFormat of pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-aspectMask-00211", - "text": " For each element of pRegions, imageSubresource.aspectMask must specify aspects present in pname:dstImage" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07981", - "text": " If pname:dstImage has a VkFormat with two planes then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07982", - "text": " If pname:dstImage has a VkFormat with three planes then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07983", - "text": " If pname:dstImage is of type VK_IMAGE_TYPE_3D, for each element of pRegions, imageSubresource.baseArrayLayer must be 0 and imageSubresource.layerCount must be 1" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-07277", - "text": " For each element of pRegions, bufferRowLength divided by the texel block extent width and then multiplied by the texel block size of pname:dstImage must be less than or equal to 231-1" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07975", - "text": " If pname:dstImage does not have either a depth/stencil or a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the texel block size" - }, - { - "vuid": 
"VUID-vkCmdCopyBufferToImage-dstImage-07976", - "text": " If pname:dstImage has a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats" - }, - { - "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07978", - "text": " If pname:dstImage has a depth/stencil format, the bufferOffset member of any element of pRegions must be a multiple of 4" - }, { "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-00171", "text": " srcBuffer must be large enough to contain all buffer locations that are accessed according to Buffer and Image Addressing, for each element of pRegions" @@ -28164,6 +28472,82 @@ "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-07931", "text": " If VK_EXT_depth_range_unrestricted is not enabled, for each element of pRegions whose imageSubresource contains a depth aspect, the data in srcBuffer must be in the range [0,1]" }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07979", + "text": " If pname:dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, pname:imageOffset.y must be 0 and pname:imageExtent.height must be 1" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageOffset-09104", + "text": " For each element of pRegions, pname:imageOffset.z and (pname:imageExtent.depth + pname:imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified pname:imageSubresource of pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07980", + "text": " If pname:dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, pname:imageOffset.z must be 0 and pname:imageExtent.depth must be 1" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-07274", + "text": " For each element of pRegions, pname:imageOffset.x must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-07275", + "text": " For each element of pRegions, pname:imageOffset.y must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-pRegions-07276", + "text": " For each element of pRegions, pname:imageOffset.z must be a multiple of the texel block extent depth of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageExtent-00207", + "text": " For each element of pRegions, if the sum of pname:imageOffset.x and extent.width does not equal the width of the subresource specified by srcSubresource, extent.width must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageExtent-00208", + "text": " For each element of pRegions, if the sum of pname:imageOffset.y and extent.height does not equal the height of the subresource specified by srcSubresource, extent.height must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageExtent-00209", + "text": " For each element of pRegions, if the sum of pname:imageOffset.z and extent.depth does not equal the depth of the subresource specified by srcSubresource, extent.depth must be a multiple of the texel block extent depth of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-imageSubresource-09105", + "text": " For 
each element of pRegions, pname:imageSubresource.aspectMask must specify aspects present in pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07981", + "text": " If pname:dstImage has a VkFormat with two planes then for each element of pRegions, pname:imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07982", + "text": " If pname:dstImage has a VkFormat with three planes then for each element of pRegions, pname:imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07983", + "text": " If pname:dstImage is of type VK_IMAGE_TYPE_3D, for each element of pRegions, pname:imageSubresource.baseArrayLayer must be 0 and pname:imageSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-bufferRowLength-09106", + "text": " For each element of pRegions, pname:bufferRowLength must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-bufferImageHeight-09107", + "text": " For each element of pRegions, pname:bufferImageHeight must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-bufferRowLength-09108", + "text": " For each element of pRegions, pname:bufferRowLength divided by the texel block extent width and then multiplied by the texel block size of pname:dstImage must be less than or equal to 231-1" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07975", + "text": " If pname:dstImage does not have either a depth/stencil or a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the texel block size" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07976", + "text": " If pname:dstImage has a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats" + }, + { + "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07978", + "text": " If pname:dstImage has a depth/stencil format, the bufferOffset member of any element of pRegions must be a multiple of 4" + }, { "vuid": "VUID-vkCmdCopyBufferToImage-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -28234,11 +28618,11 @@ }, { "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-07971", - "text": " For each element of pRegions, imageOffset.x and (imageExtent.width + imageOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified pname:imageSubresource of pname:srcImage" + "text": " For each element of pRegions, pname:imageOffset.x and (pname:imageExtent.width + pname:imageOffset.x) must both be greater than or equal to 0 and less than or equal to the width of the specified pname:imageSubresource of pname:srcImage" }, { "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-07972", - "text": " For each element of pRegions, imageOffset.y and (imageExtent.height + imageOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified pname:imageSubresource of pname:srcImage" + "text": " For each element of pRegions, pname:imageOffset.y and (pname:imageExtent.height + 
pname:imageOffset.y) must both be greater than or equal to 0 and less than or equal to the height of the specified pname:imageSubresource of pname:srcImage" }, { "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07973", @@ -28264,82 +28648,6 @@ "vuid": "VUID-vkCmdCopyImageToBuffer-imageOffset-07747", "text": " The imageOffset and imageExtent members of each element of pname:pRegions must respect the image transfer granularity requirements of commandBuffer’s command pool’s queue family, as described in VkQueueFamilyProperties" }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07979", - "text": " If pname:srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, imageOffset.y must be 0 and imageExtent.height must be 1" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-imageOffset-00200", - "text": " For each element of pRegions, imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified imageSubresource of pname:srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07980", - "text": " If pname:srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, imageOffset.z must be 0 and imageExtent.depth must be 1" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-bufferRowLength-00203", - "text": " For each element of pRegions, bufferRowLength must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-bufferImageHeight-00204", - "text": " For each element of pRegions, bufferImageHeight must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-07274", - "text": " For each element of pRegions, imageOffset.x must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-07275", - "text": " For each element of pRegions, imageOffset.y must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-07276", - "text": " For each element of pRegions, imageOffset.z must be a multiple of the texel block extent depth of the VkFormat of pname:srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-imageExtent-00207", - "text": " For each element of pRegions, if the sum of imageOffset.x and extent.width does not equal the width of the subresource specified by srcSubresource, extent.width must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-imageExtent-00208", - "text": " For each element of pRegions, if the sum of imageOffset.y and extent.height does not equal the height of the subresource specified by srcSubresource, extent.height must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-imageExtent-00209", - "text": " For each element of pRegions, if the sum of imageOffset.z and extent.depth does not equal the depth of the subresource specified by srcSubresource, extent.depth must be a multiple of the texel block extent depth of the VkFormat of pname:srcImage" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-aspectMask-00211", - "text": " For each element of pRegions, imageSubresource.aspectMask must specify aspects present in pname:srcImage" - }, - { - "vuid": 
"VUID-vkCmdCopyImageToBuffer-srcImage-07981", - "text": " If pname:srcImage has a VkFormat with two planes then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07982", - "text": " If pname:srcImage has a VkFormat with three planes then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07983", - "text": " If pname:srcImage is of type VK_IMAGE_TYPE_3D, for each element of pRegions, imageSubresource.baseArrayLayer must be 0 and imageSubresource.layerCount must be 1" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-07277", - "text": " For each element of pRegions, bufferRowLength divided by the texel block extent width and then multiplied by the texel block size of pname:srcImage must be less than or equal to 231-1" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07975", - "text": " If pname:srcImage does not have either a depth/stencil or a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the texel block size" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07976", - "text": " If pname:srcImage has a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats" - }, - { - "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07978", - "text": " If pname:srcImage has a depth/stencil format, the bufferOffset member of any element of pRegions must be a multiple of 4" - }, { "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-00183", "text": " dstBuffer must be large enough to contain all buffer locations that are accessed according to Buffer and Image Addressing, for each element of pRegions" @@ -28372,6 +28680,82 @@ "vuid": "VUID-vkCmdCopyImageToBuffer-srcImageLayout-01397", "text": " srcImageLayout must be VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07979", + "text": " If pname:srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, pname:imageOffset.y must be 0 and pname:imageExtent.height must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageOffset-09104", + "text": " For each element of pRegions, pname:imageOffset.z and (pname:imageExtent.depth + pname:imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified pname:imageSubresource of pname:srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07980", + "text": " If pname:srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, pname:imageOffset.z must be 0 and pname:imageExtent.depth must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-07274", + "text": " For each element of pRegions, pname:imageOffset.x must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-pRegions-07275", + "text": " For each element of pRegions, pname:imageOffset.y must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" + }, + { + "vuid": 
"VUID-vkCmdCopyImageToBuffer-pRegions-07276", + "text": " For each element of pRegions, pname:imageOffset.z must be a multiple of the texel block extent depth of the VkFormat of pname:srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageExtent-00207", + "text": " For each element of pRegions, if the sum of pname:imageOffset.x and extent.width does not equal the width of the subresource specified by srcSubresource, extent.width must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageExtent-00208", + "text": " For each element of pRegions, if the sum of pname:imageOffset.y and extent.height does not equal the height of the subresource specified by srcSubresource, extent.height must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageExtent-00209", + "text": " For each element of pRegions, if the sum of pname:imageOffset.z and extent.depth does not equal the depth of the subresource specified by srcSubresource, extent.depth must be a multiple of the texel block extent depth of the VkFormat of pname:srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-09105", + "text": " For each element of pRegions, pname:imageSubresource.aspectMask must specify aspects present in pname:srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07981", + "text": " If pname:srcImage has a VkFormat with two planes then for each element of pRegions, pname:imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07982", + "text": " If pname:srcImage has a VkFormat with three planes then for each element of pRegions, pname:imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07983", + "text": " If pname:srcImage is of type VK_IMAGE_TYPE_3D, for each element of pRegions, pname:imageSubresource.baseArrayLayer must be 0 and pname:imageSubresource.layerCount must be 1" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-bufferRowLength-09106", + "text": " For each element of pRegions, pname:bufferRowLength must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-bufferImageHeight-09107", + "text": " For each element of pRegions, pname:bufferImageHeight must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-bufferRowLength-09108", + "text": " For each element of pRegions, pname:bufferRowLength divided by the texel block extent width and then multiplied by the texel block size of pname:srcImage must be less than or equal to 231-1" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07975", + "text": " If pname:srcImage does not have either a depth/stencil or a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the texel block size" + }, + { + "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07976", + "text": " If pname:srcImage has a multi-planar format, then for each element of pRegions, bufferOffset must be a multiple of the element size of the compatible format for the format and the aspectMask of the imageSubresource as defined in Compatible formats of planes of multi-planar formats" + }, + { + 
"vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07978", + "text": " If pname:srcImage has a depth/stencil format, the bufferOffset member of any element of pRegions must be a multiple of 4" + }, { "vuid": "VUID-vkCmdCopyImageToBuffer-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -28421,15 +28805,15 @@ "VkBufferImageCopy": { "core": [ { - "vuid": "VUID-VkBufferImageCopy-bufferRowLength-00195", - "text": " bufferRowLength must be 0, or greater than or equal to the width member of imageExtent" + "vuid": "VUID-VkBufferImageCopy-bufferRowLength-09101", + "text": " pname:bufferRowLength must be 0, or greater than or equal to the width member of imageExtent" }, { - "vuid": "VUID-VkBufferImageCopy-bufferImageHeight-00196", - "text": " bufferImageHeight must be 0, or greater than or equal to the height member of imageExtent" + "vuid": "VUID-VkBufferImageCopy-bufferImageHeight-09102", + "text": " pname:bufferImageHeight must be 0, or greater than or equal to the height member of imageExtent" }, { - "vuid": "VUID-VkBufferImageCopy-aspectMask-00212", + "vuid": "VUID-VkBufferImageCopy-aspectMask-09103", "text": " The aspectMask member of imageSubresource must only have a single bit set" }, { @@ -28582,67 +28966,67 @@ }, { "vuid": "VUID-VkCopyBufferToImageInfo2-dstImage-07979", - "text": " If pname:dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, imageOffset.y must be 0 and imageExtent.height must be 1" + "text": " If pname:dstImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, pname:imageOffset.y must be 0 and pname:imageExtent.height must be 1" }, { - "vuid": "VUID-VkCopyBufferToImageInfo2-imageOffset-00200", - "text": " For each element of pRegions, imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified imageSubresource of pname:dstImage" + "vuid": "VUID-VkCopyBufferToImageInfo2-imageOffset-09104", + "text": " For each element of pRegions, pname:imageOffset.z and (pname:imageExtent.depth + pname:imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified pname:imageSubresource of pname:dstImage" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-dstImage-07980", - "text": " If pname:dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, imageOffset.z must be 0 and imageExtent.depth must be 1" - }, - { - "vuid": "VUID-VkCopyBufferToImageInfo2-bufferRowLength-00203", - "text": " For each element of pRegions, bufferRowLength must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" - }, - { - "vuid": "VUID-VkCopyBufferToImageInfo2-bufferImageHeight-00204", - "text": " For each element of pRegions, bufferImageHeight must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" + "text": " If pname:dstImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, pname:imageOffset.z must be 0 and pname:imageExtent.depth must be 1" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-pRegions-07274", - "text": " For each element of pRegions, imageOffset.x must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" + "text": " For each element of pRegions, pname:imageOffset.x must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-pRegions-07275", - "text": " For each element of 
pRegions, imageOffset.y must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" + "text": " For each element of pRegions, pname:imageOffset.y must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-pRegions-07276", - "text": " For each element of pRegions, imageOffset.z must be a multiple of the texel block extent depth of the VkFormat of pname:dstImage" + "text": " For each element of pRegions, pname:imageOffset.z must be a multiple of the texel block extent depth of the VkFormat of pname:dstImage" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-imageExtent-00207", - "text": " For each element of pRegions, if the sum of imageOffset.x and extent.width does not equal the width of the subresource specified by srcSubresource, extent.width must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" + "text": " For each element of pRegions, if the sum of pname:imageOffset.x and extent.width does not equal the width of the subresource specified by srcSubresource, extent.width must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-imageExtent-00208", - "text": " For each element of pRegions, if the sum of imageOffset.y and extent.height does not equal the height of the subresource specified by srcSubresource, extent.height must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" + "text": " For each element of pRegions, if the sum of pname:imageOffset.y and extent.height does not equal the height of the subresource specified by srcSubresource, extent.height must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-imageExtent-00209", - "text": " For each element of pRegions, if the sum of imageOffset.z and extent.depth does not equal the depth of the subresource specified by srcSubresource, extent.depth must be a multiple of the texel block extent depth of the VkFormat of pname:dstImage" + "text": " For each element of pRegions, if the sum of pname:imageOffset.z and extent.depth does not equal the depth of the subresource specified by srcSubresource, extent.depth must be a multiple of the texel block extent depth of the VkFormat of pname:dstImage" }, { - "vuid": "VUID-VkCopyBufferToImageInfo2-aspectMask-00211", - "text": " For each element of pRegions, imageSubresource.aspectMask must specify aspects present in pname:dstImage" + "vuid": "VUID-VkCopyBufferToImageInfo2-imageSubresource-09105", + "text": " For each element of pRegions, pname:imageSubresource.aspectMask must specify aspects present in pname:dstImage" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-dstImage-07981", - "text": " If pname:dstImage has a VkFormat with two planes then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + "text": " If pname:dstImage has a VkFormat with two planes then for each element of pRegions, pname:imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-dstImage-07982", - "text": " If pname:dstImage has a VkFormat with three planes then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + "text": " If pname:dstImage has a VkFormat with three planes 
then for each element of pRegions, pname:imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-dstImage-07983", - "text": " If pname:dstImage is of type VK_IMAGE_TYPE_3D, for each element of pRegions, imageSubresource.baseArrayLayer must be 0 and imageSubresource.layerCount must be 1" + "text": " If pname:dstImage is of type VK_IMAGE_TYPE_3D, for each element of pRegions, pname:imageSubresource.baseArrayLayer must be 0 and pname:imageSubresource.layerCount must be 1" }, { - "vuid": "VUID-VkCopyBufferToImageInfo2-pRegions-07277", - "text": " For each element of pRegions, bufferRowLength divided by the texel block extent width and then multiplied by the texel block size of pname:dstImage must be less than or equal to 231-1" + "vuid": "VUID-VkCopyBufferToImageInfo2-bufferRowLength-09106", + "text": " For each element of pRegions, pname:bufferRowLength must be a multiple of the texel block extent width of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2-bufferImageHeight-09107", + "text": " For each element of pRegions, pname:bufferImageHeight must be a multiple of the texel block extent height of the VkFormat of pname:dstImage" + }, + { + "vuid": "VUID-VkCopyBufferToImageInfo2-bufferRowLength-09108", + "text": " For each element of pRegions, pname:bufferRowLength divided by the texel block extent width and then multiplied by the texel block size of pname:dstImage must be less than or equal to 231-1" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-dstImage-07975", @@ -28822,67 +29206,67 @@ }, { "vuid": "VUID-VkCopyImageToBufferInfo2-srcImage-07979", - "text": " If pname:srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, imageOffset.y must be 0 and imageExtent.height must be 1" + "text": " If pname:srcImage is of type VK_IMAGE_TYPE_1D, then for each element of pRegions, pname:imageOffset.y must be 0 and pname:imageExtent.height must be 1" }, { - "vuid": "VUID-VkCopyImageToBufferInfo2-imageOffset-00200", - "text": " For each element of pRegions, imageOffset.z and (imageExtent.depth + imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified imageSubresource of pname:srcImage" + "vuid": "VUID-VkCopyImageToBufferInfo2-imageOffset-09104", + "text": " For each element of pRegions, pname:imageOffset.z and (pname:imageExtent.depth + pname:imageOffset.z) must both be greater than or equal to 0 and less than or equal to the depth of the specified pname:imageSubresource of pname:srcImage" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-srcImage-07980", - "text": " If pname:srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, imageOffset.z must be 0 and imageExtent.depth must be 1" - }, - { - "vuid": "VUID-VkCopyImageToBufferInfo2-bufferRowLength-00203", - "text": " For each element of pRegions, bufferRowLength must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" - }, - { - "vuid": "VUID-VkCopyImageToBufferInfo2-bufferImageHeight-00204", - "text": " For each element of pRegions, bufferImageHeight must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" + "text": " If pname:srcImage is of type VK_IMAGE_TYPE_1D or VK_IMAGE_TYPE_2D, then for each element of pRegions, pname:imageOffset.z must be 0 and pname:imageExtent.depth must be 1" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-pRegions-07274", - 
"text": " For each element of pRegions, imageOffset.x must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" + "text": " For each element of pRegions, pname:imageOffset.x must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-pRegions-07275", - "text": " For each element of pRegions, imageOffset.y must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" + "text": " For each element of pRegions, pname:imageOffset.y must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-pRegions-07276", - "text": " For each element of pRegions, imageOffset.z must be a multiple of the texel block extent depth of the VkFormat of pname:srcImage" + "text": " For each element of pRegions, pname:imageOffset.z must be a multiple of the texel block extent depth of the VkFormat of pname:srcImage" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-imageExtent-00207", - "text": " For each element of pRegions, if the sum of imageOffset.x and extent.width does not equal the width of the subresource specified by srcSubresource, extent.width must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" + "text": " For each element of pRegions, if the sum of pname:imageOffset.x and extent.width does not equal the width of the subresource specified by srcSubresource, extent.width must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-imageExtent-00208", - "text": " For each element of pRegions, if the sum of imageOffset.y and extent.height does not equal the height of the subresource specified by srcSubresource, extent.height must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" + "text": " For each element of pRegions, if the sum of pname:imageOffset.y and extent.height does not equal the height of the subresource specified by srcSubresource, extent.height must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-imageExtent-00209", - "text": " For each element of pRegions, if the sum of imageOffset.z and extent.depth does not equal the depth of the subresource specified by srcSubresource, extent.depth must be a multiple of the texel block extent depth of the VkFormat of pname:srcImage" + "text": " For each element of pRegions, if the sum of pname:imageOffset.z and extent.depth does not equal the depth of the subresource specified by srcSubresource, extent.depth must be a multiple of the texel block extent depth of the VkFormat of pname:srcImage" }, { - "vuid": "VUID-VkCopyImageToBufferInfo2-aspectMask-00211", - "text": " For each element of pRegions, imageSubresource.aspectMask must specify aspects present in pname:srcImage" + "vuid": "VUID-VkCopyImageToBufferInfo2-imageSubresource-09105", + "text": " For each element of pRegions, pname:imageSubresource.aspectMask must specify aspects present in pname:srcImage" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-srcImage-07981", - "text": " If pname:srcImage has a VkFormat with two planes then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" + "text": " If pname:srcImage has a VkFormat with two planes then for each element of pRegions, pname:imageSubresource.aspectMask must be 
VK_IMAGE_ASPECT_PLANE_0_BIT or VK_IMAGE_ASPECT_PLANE_1_BIT" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-srcImage-07982", - "text": " If pname:srcImage has a VkFormat with three planes then for each element of pRegions, imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" + "text": " If pname:srcImage has a VkFormat with three planes then for each element of pRegions, pname:imageSubresource.aspectMask must be VK_IMAGE_ASPECT_PLANE_0_BIT, VK_IMAGE_ASPECT_PLANE_1_BIT, or VK_IMAGE_ASPECT_PLANE_2_BIT" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-srcImage-07983", - "text": " If pname:srcImage is of type VK_IMAGE_TYPE_3D, for each element of pRegions, imageSubresource.baseArrayLayer must be 0 and imageSubresource.layerCount must be 1" + "text": " If pname:srcImage is of type VK_IMAGE_TYPE_3D, for each element of pRegions, pname:imageSubresource.baseArrayLayer must be 0 and pname:imageSubresource.layerCount must be 1" }, { - "vuid": "VUID-VkCopyImageToBufferInfo2-pRegions-07277", - "text": " For each element of pRegions, bufferRowLength divided by the texel block extent width and then multiplied by the texel block size of pname:srcImage must be less than or equal to 231-1" + "vuid": "VUID-VkCopyImageToBufferInfo2-bufferRowLength-09106", + "text": " For each element of pRegions, pname:bufferRowLength must be a multiple of the texel block extent width of the VkFormat of pname:srcImage" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2-bufferImageHeight-09107", + "text": " For each element of pRegions, pname:bufferImageHeight must be a multiple of the texel block extent height of the VkFormat of pname:srcImage" + }, + { + "vuid": "VUID-VkCopyImageToBufferInfo2-bufferRowLength-09108", + "text": " For each element of pRegions, pname:bufferRowLength divided by the texel block extent width and then multiplied by the texel block size of pname:srcImage must be less than or equal to 231-1" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-srcImage-07975", @@ -28941,15 +29325,15 @@ "VkBufferImageCopy2": { "core": [ { - "vuid": "VUID-VkBufferImageCopy2-bufferRowLength-00195", - "text": " bufferRowLength must be 0, or greater than or equal to the width member of imageExtent" + "vuid": "VUID-VkBufferImageCopy2-bufferRowLength-09101", + "text": " pname:bufferRowLength must be 0, or greater than or equal to the width member of imageExtent" }, { - "vuid": "VUID-VkBufferImageCopy2-bufferImageHeight-00196", - "text": " bufferImageHeight must be 0, or greater than or equal to the height member of imageExtent" + "vuid": "VUID-VkBufferImageCopy2-bufferImageHeight-09102", + "text": " pname:bufferImageHeight must be 0, or greater than or equal to the height member of imageExtent" }, { - "vuid": "VUID-VkBufferImageCopy2-aspectMask-00212", + "vuid": "VUID-VkBufferImageCopy2-aspectMask-09103", "text": " The aspectMask member of imageSubresource must only have a single bit set" }, { @@ -30398,11 +30782,11 @@ "core": [ { "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-06252", - "text": " If topology is VK_PRIMITIVE_TOPOLOGY_POINT_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY or VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, and primitiveRestartEnable is VK_TRUE, the primitiveTopologyListRestart feature must be enabled" + "text": " If the primitiveTopologyListRestart feature is not enabled, and topology is VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 
VK_PRIMITIVE_TOPOLOGY_LINE_LIST, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY, or VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY, primitiveRestartEnable must be VK_FALSE" }, { "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-06253", - "text": " If topology is VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, and primitiveRestartEnable is VK_TRUE, the primitiveTopologyPatchListRestart feature must be enabled" + "text": " If the primitiveTopologyPatchListRestart feature is not enabled, and topology is VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, primitiveRestartEnable must be VK_FALSE" }, { "vuid": "VUID-VkPipelineInputAssemblyStateCreateInfo-topology-00429", @@ -30568,6 +30952,10 @@ "vuid": "VUID-vkCmdDraw-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDraw-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDraw-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -31732,6 +32120,10 @@ "vuid": "VUID-vkCmdDraw-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDraw-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDraw-commandBuffer-02712", "text": " If commandBuffer is a protected command buffer and protectedNoFault is not supported, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource" @@ -31872,6 +32264,10 @@ "vuid": "VUID-vkCmdDrawIndexed-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawIndexed-filterCubic-02694", "text": " Any VkImageView being sampled with 
VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -33036,6 +33432,10 @@ "vuid": "VUID-vkCmdDrawIndexed-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-02712", "text": " If commandBuffer is a protected command buffer and protectedNoFault is not supported, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource" @@ -33184,6 +33584,10 @@ "vuid": "VUID-vkCmdDrawMultiEXT-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawMultiEXT-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawMultiEXT-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -34348,6 +34752,10 @@ "vuid": "VUID-vkCmdDrawMultiEXT-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawMultiEXT-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawMultiEXT-commandBuffer-02712", "text": " If commandBuffer is a protected command buffer and protectedNoFault is 
not supported, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource" @@ -34504,6 +34912,10 @@ "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -35668,6 +36080,10 @@ "vuid": "VUID-vkCmdDrawMultiIndexedEXT-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-commandBuffer-02712", "text": " If commandBuffer is a protected command buffer and protectedNoFault is not supported, any resource written to by the VkPipeline object bound to the pipeline bind point used by this command must not be an unprotected resource" @@ -35836,6 +36252,10 @@ "vuid": "VUID-vkCmdDrawIndirect-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawIndirect-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -37000,6 +37420,10 @@ "vuid": "VUID-vkCmdDrawIndirect-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with 
VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawIndirect-None-04007", "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have either valid or VK_NULL_HANDLE buffers bound" @@ -37184,6 +37608,10 @@ "vuid": "VUID-vkCmdDrawIndirectCount-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawIndirectCount-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -38348,6 +38776,10 @@ "vuid": "VUID-vkCmdDrawIndirectCount-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-04007", "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have either valid or VK_NULL_HANDLE buffers bound" @@ -38544,6 +38976,10 @@ "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-02693", + "text": " If the 
VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -39708,6 +40144,10 @@ "vuid": "VUID-vkCmdDrawIndexedIndirect-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04007", "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have either valid or VK_NULL_HANDLE buffers bound" @@ -39900,6 +40340,10 @@ "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -41064,6 +41508,10 @@ "vuid": "VUID-vkCmdDrawIndexedIndirectCount-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is 
VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04007", "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have either valid or VK_NULL_HANDLE buffers bound" @@ -41268,6 +41716,10 @@ "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -42432,6 +42884,10 @@ "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04007", "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have either valid or VK_NULL_HANDLE buffers bound" @@ -42692,6 +43148,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by 
VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -43856,6 +44316,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksNV-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksNV-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-stage-06480", "text": " The bound graphics pipeline must not have been created with the VkPipelineShaderStageCreateInfo::stage member of an element of VkGraphicsPipelineCreateInfo::pStages set to VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT or VK_SHADER_STAGE_GEOMETRY_BIT" @@ -43928,6 +44392,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -45092,6 +45560,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-stage-06480", "text": " The bound graphics pipeline must not have been 
created with the VkPipelineShaderStageCreateInfo::stage member of an element of VkGraphicsPipelineCreateInfo::pStages set to VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT or VK_SHADER_STAGE_GEOMETRY_BIT" @@ -45212,6 +45684,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -46376,6 +46852,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-stage-06480", "text": " The bound graphics pipeline must not have been created with the VkPipelineShaderStageCreateInfo::stage member of an element of VkGraphicsPipelineCreateInfo::pStages set to VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT or VK_SHADER_STAGE_GEOMETRY_BIT" @@ -46512,6 +46992,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by 
VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -47676,6 +48160,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksEXT-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-stage-06480", "text": " The bound graphics pipeline must not have been created with the VkPipelineShaderStageCreateInfo::stage member of an element of VkGraphicsPipelineCreateInfo::pStages set to VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT or VK_SHADER_STAGE_GEOMETRY_BIT" @@ -47776,6 +48264,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -48940,6 +49432,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-stage-06480", "text": " The bound graphics pipeline must not have 
been created with the VkPipelineShaderStageCreateInfo::stage member of an element of VkGraphicsPipelineCreateInfo::pStages set to VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT or VK_SHADER_STAGE_GEOMETRY_BIT" @@ -49088,6 +49584,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -50252,6 +50752,10 @@ "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-stage-06480", "text": " The bound graphics pipeline must not have been created with the VkPipelineShaderStageCreateInfo::stage member of an element of VkGraphicsPipelineCreateInfo::pStages set to VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT or VK_SHADER_STAGE_GEOMETRY_BIT" @@ -50388,6 +50892,10 @@ "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by 
VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -51552,6 +52060,10 @@ "vuid": "VUID-vkCmdDrawClusterHUAWEI-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-stage-06480", "text": " The bound graphics pipeline must not have been created with the VkPipelineShaderStageCreateInfo::stage member of an element of VkGraphicsPipelineCreateInfo::pStages set to VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT or VK_SHADER_STAGE_GEOMETRY_BIT" @@ -51636,6 +52148,10 @@ "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -52800,6 +53316,10 @@ "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-stage-06480", "text": " The bound graphics pipeline must 
not have been created with the VkPipelineShaderStageCreateInfo::stage member of an element of VkGraphicsPipelineCreateInfo::pStages set to VK_SHADER_STAGE_VERTEX_BIT, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT or VK_SHADER_STAGE_GEOMETRY_BIT" @@ -56824,6 +57344,10 @@ "vuid": "VUID-vkCmdDispatch-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDispatch-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDispatch-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -57092,6 +57616,10 @@ "vuid": "VUID-vkCmdDispatchIndirect-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDispatchIndirect-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDispatchIndirect-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -57380,6 +57908,10 @@ "vuid": "VUID-vkCmdDispatchBase-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdDispatchBase-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdDispatchBase-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -57664,6 +58196,10 @@ "vuid": "VUID-vkCmdSubpassShadingHUAWEI-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdSubpassShadingHUAWEI-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with 
VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdSubpassShadingHUAWEI-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -57914,7 +58450,7 @@ "core": [ { "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pipelineBindPoint-02930", - "text": " The pipelineBindPoint must be VK_PIPELINE_BIND_POINT_GRAPHICS" + "text": " The pipelineBindPoint must be VK_PIPELINE_BIND_POINT_GRAPHICS or VK_PIPELINE_BIND_POINT_COMPUTE" }, { "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-tokenCount-02931", @@ -57930,7 +58466,7 @@ }, { "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pTokens-02934", - "text": " All state tokens in pTokens must occur before any work provoking tokens (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV)" + "text": " All state tokens in pTokens must occur before any work provoking tokens (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV, VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV , VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV )" }, { "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pTokens-02935", @@ -57944,6 +58480,18 @@ "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pStreamStrides-02937", "text": " each element of pStreamStrides must be greater than `0`and less than or equal to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxIndirectCommandsStreamStride. 
Furthermore the alignment of each token input must be ensured" }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pipelineBindPoint-09088", + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_COMPUTE then the VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedCompute feature must be enabled" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pipelineBindPoint-09089", + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_COMPUTE then the state tokens in pTokens must only include VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV, VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV, or VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV" + }, + { + "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-pipelineBindPoint-09090", + "text": " If pipelineBindPoint is VK_PIPELINE_BIND_POINT_COMPUTE and pTokens includes VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV, then the VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputePipelines feature must be enabled" + }, { "vuid": "VUID-VkIndirectCommandsLayoutCreateInfoNV-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV" @@ -58078,6 +58626,26 @@ } ] }, + "VkBindPipelineIndirectCommandNV": { + "core": [ + { + "vuid": "VUID-VkBindPipelineIndirectCommandNV-deviceGeneratedComputePipelines-09091", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputePipelines feature must be enabled" + }, + { + "vuid": "VUID-VkBindPipelineIndirectCommandNV-None-09092", + "text": " The referenced pipeline must have been created with VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + }, + { + "vuid": "VUID-VkBindPipelineIndirectCommandNV-None-09093", + "text": " The referenced pipeline must have been updated with vkCmdUpdatePipelineIndirectBuffer" + }, + { + "vuid": "VUID-VkBindPipelineIndirectCommandNV-None-09094", + "text": " The referenced pipeline’s address must have been queried with vkGetPipelineIndirectDeviceAddressNV" + } + ] + }, "VkIndirectCommandsLayoutTokenNV": { "core": [ { @@ -58168,6 +58736,10 @@ "vuid": "VUID-vkGetGeneratedCommandsMemoryRequirementsNV-deviceGeneratedCommands-02906", "text": " The VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV::deviceGeneratedCommands feature must be enabled" }, + { + "vuid": "VUID-vkGetGeneratedCommandsMemoryRequirementsNV-pInfo-09074", + "text": " If pInfo::pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_COMPUTE, then the VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedCompute feature must be enabled" + }, { "vuid": "VUID-vkGetGeneratedCommandsMemoryRequirementsNV-device-parameter", "text": " device must be a valid VkDevice handle" @@ -58188,6 +58760,18 @@ "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-maxSequencesCount-02907", "text": " maxSequencesCount must be less or equal to VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxIndirectSequenceCount" }, + { + "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-pipelineBindPoint-09075", + "text": " If pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_GRAPHICS, then pipeline must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-pipelineBindPoint-09076", + "text": " If pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_COMPUTE, and the indirectCommandsLayout was not created with a VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV token, then the pipeline must be a valid VkPipeline handle" + }, + { + "vuid": 
"VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-pipelineBindPoint-09077", + "text": " If pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_COMPUTE, and the indirectCommandsLayout contains a VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV token, then the pipeline must be NULL" + }, { "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV" @@ -58202,7 +58786,7 @@ }, { "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-pipeline-parameter", - "text": " pipeline must be a valid VkPipeline handle" + "text": " If pipeline is not VK_NULL_HANDLE, pipeline must be a valid VkPipeline handle" }, { "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-indirectCommandsLayout-parameter", @@ -58210,7 +58794,55 @@ }, { "vuid": "VUID-VkGeneratedCommandsMemoryRequirementsInfoNV-commonparent", - "text": " Both of indirectCommandsLayout, and pipeline must have been created, allocated, or retrieved from the same VkDevice" + "text": " Both of indirectCommandsLayout, and pipeline that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "vkGetPipelineIndirectDeviceAddressNV": { + "core": [ + { + "vuid": "VUID-vkGetPipelineIndirectDeviceAddressNV-deviceGeneratedComputePipelines-09078", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputePipelines feature must be enabled" + }, + { + "vuid": "VUID-vkGetPipelineIndirectDeviceAddressNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetPipelineIndirectDeviceAddressNV-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkPipelineIndirectDeviceAddressInfoNV structure" + } + ] + }, + "VkPipelineIndirectDeviceAddressInfoNV": { + "core": [ + { + "vuid": "VUID-VkPipelineIndirectDeviceAddressInfoNV-pipelineBindPoint-09079", + "text": " The provided pipelineBindPoint must be of type VK_PIPELINE_BIND_POINT_COMPUTE" + }, + { + "vuid": "VUID-VkPipelineIndirectDeviceAddressInfoNV-pipeline-09080", + "text": " pipeline must have been created with flag VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV set" + }, + { + "vuid": "VUID-VkPipelineIndirectDeviceAddressInfoNV-pipeline-09081", + "text": " pipeline must have been created with a VkComputePipelineIndirectBufferInfoNV structure specifying a valid address where its metadata will be saved" + }, + { + "vuid": "VUID-VkPipelineIndirectDeviceAddressInfoNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV" + }, + { + "vuid": "VUID-VkPipelineIndirectDeviceAddressInfoNV-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkPipelineIndirectDeviceAddressInfoNV-pipelineBindPoint-parameter", + "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" + }, + { + "vuid": "VUID-VkPipelineIndirectDeviceAddressInfoNV-pipeline-parameter", + "text": " pipeline must be a valid VkPipeline handle" } ] }, @@ -58240,6 +58872,10 @@ "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result 
of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -59404,6 +60040,10 @@ "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-pDynamicStates-08716", "text": " If the bound graphics pipeline state includes a fragment shader stage, was created with VK_DYNAMIC_STATE_STENCIL_WRITE_MASK set in VkPipelineDynamicStateCreateInfo::pDynamicStates, and the fragment shader declares the EarlyFragmentTests execution mode and uses OpStencilAttachmentReadEXT, the writeMask parameter in the last call to vkCmdSetStencilWriteMask must be 0" }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-09116", + "text": " If a shader object is bound to any graphics stage or the currently bound graphics pipeline was created with VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT, and the format of any color attachment is VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, the corresponding element of the pColorWriteMasks parameter of vkCmdSetColorWriteMaskEXT must either include all of VK_COLOR_COMPONENT_R_BIT, VK_COLOR_COMPONENT_G_BIT, and VK_COLOR_COMPONENT_B_BIT, or none of them" + }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-04007", "text": " All vertex input bindings accessed via vertex input variables declared in the vertex shader entry point’s interface must have either valid or VK_NULL_HANDLE buffers bound" @@ -59494,7 +60134,7 @@ }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-pipeline-02909", - "text": " VkGeneratedCommandsInfoNV::pipeline must match the current bound pipeline at VkGeneratedCommandsInfoNV::pipelineBindPoint" + "text": " VkGeneratedCommandsInfoNV::pipeline must match the current bound pipeline at VkGeneratedCommandsInfoNV::pipelineBindPoint" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-02910", @@ -59552,6 +60192,22 @@ "vuid": "VUID-VkGeneratedCommandsInfoNV-streamCount-02916", "text": " streamCount must match the indirectCommandsLayout’s streamCount" }, + { + "vuid": "VUID-VkGeneratedCommandsInfoNV-pipelineBindPoint-09084", + "text": " If pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_COMPUTE, then the pipeline must have been created with the flag VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + }, + { + "vuid": "VUID-VkGeneratedCommandsInfoNV-pipelineBindPoint-09085", + "text": " If pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_COMPUTE, then the pipeline must have been created with a VkComputePipelineIndirectBufferInfoNV structure specifying a valid address where its metadata will be saved" + }, + { + "vuid": "VUID-VkGeneratedCommandsInfoNV-pipelineBindPoint-09086", + "text": " If pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_COMPUTE, then vkCmdUpdatePipelineIndirectBuffer must have been called on that pipeline to save its metadata to a device address" + }, + { + "vuid": "VUID-VkGeneratedCommandsInfoNV-pipelineBindPoint-09087", + "text": " If pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_COMPUTE, and if VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV is used, then pipeline must be VK_NULL_HANDLE" + }, { "vuid": "VUID-VkGeneratedCommandsInfoNV-sequencesCount-02917", "text": " sequencesCount must be less or equal to 
VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV::maxIndirectSequenceCount and VkGeneratedCommandsMemoryRequirementsInfoNV::maxSequencesCount that was used to determine the preprocessSize" @@ -66192,6 +66848,10 @@ "vuid": "VUID-vkCmdTraceRaysNV-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdTraceRaysNV-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdTraceRaysNV-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -66552,6 +67212,10 @@ "vuid": "VUID-vkCmdTraceRaysKHR-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdTraceRaysKHR-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -67028,6 +67692,10 @@ "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -67452,6 +68120,10 @@ "vuid": "VUID-vkCmdTraceRaysIndirect2KHR-None-02692", "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" }, + { + "vuid": "VUID-vkCmdTraceRaysIndirect2KHR-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a 
VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, { "vuid": "VUID-vkCmdTraceRaysIndirect2KHR-filterCubic-02694", "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" @@ -71174,6 +71846,14 @@ } ] }, + "VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV": { + "core": [ + { + "vuid": "VUID-VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV" + } + ] + }, "VkPhysicalDeviceDiagnosticsConfigFeaturesNV": { "core": [ { @@ -71498,6 +72178,14 @@ } ] }, + "VkPhysicalDeviceHostImageCopyFeaturesEXT": { + "core": [ + { + "vuid": "VUID-VkPhysicalDeviceHostImageCopyFeaturesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT" + } + ] + }, "VkPhysicalDevicePresentBarrierFeaturesNV": { "core": [ { @@ -72206,6 +72894,22 @@ } ] }, + "VkPhysicalDeviceHostImageCopyPropertiesEXT": { + "core": [ + { + "vuid": "VUID-VkPhysicalDeviceHostImageCopyPropertiesEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT" + }, + { + "vuid": "VUID-VkPhysicalDeviceHostImageCopyPropertiesEXT-pCopySrcLayouts-parameter", + "text": " If copySrcLayoutCount is not 0, and pCopySrcLayouts is not NULL, pCopySrcLayouts must be a valid pointer to an array of copySrcLayoutCount VkImageLayout values" + }, + { + "vuid": "VUID-VkPhysicalDeviceHostImageCopyPropertiesEXT-pCopyDstLayouts-parameter", + "text": " If copyDstLayoutCount is not 0, and pCopyDstLayouts is not NULL, pCopyDstLayouts must be a valid pointer to an array of copyDstLayoutCount VkImageLayout values" + } + ] + }, "VkPhysicalDeviceSubpassShadingPropertiesHUAWEI": { "core": [ { @@ -72524,6 +73228,10 @@ "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-01868", "text": " If the pNext chain of pImageFormatProperties includes a VkAndroidHardwareBufferUsageANDROID structure, the pNext chain of pImageFormatInfo must include a VkPhysicalDeviceExternalImageFormatInfo structure with handleType set to VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID" }, + { + "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-pNext-09004", + "text": " If the pNext chain of pImageFormatProperties includes a VkHostImageCopyDevicePerformanceQueryEXT structure, pImageFormatInfo->usage must contain VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT" + }, { "vuid": "VUID-vkGetPhysicalDeviceImageFormatProperties2-physicalDevice-parameter", "text": " physicalDevice must be a valid VkPhysicalDevice handle" @@ -72588,13 +73296,17 @@ }, "VkImageFormatProperties2": { "core": [ + { + "vuid": "VUID-VkImageFormatProperties2-pNext-09005", + "text": " If VkHostImageCopyDevicePerformanceQueryEXT is included in a pNext chain, the hostImageCopy feature must be enabled" + }, { "vuid": "VUID-VkImageFormatProperties2-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2" }, { "vuid": "VUID-VkImageFormatProperties2-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkAndroidHardwareBufferUsageANDROID, VkExternalImageFormatProperties, 
VkFilterCubicImageViewImageFormatPropertiesEXT, VkImageCompressionPropertiesEXT, VkSamplerYcbcrConversionImageFormatProperties, or VkTextureLODGatherFormatPropertiesAMD" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkAndroidHardwareBufferUsageANDROID, VkExternalImageFormatProperties, VkFilterCubicImageViewImageFormatPropertiesEXT, VkHostImageCopyDevicePerformanceQueryEXT, VkImageCompressionPropertiesEXT, VkSamplerYcbcrConversionImageFormatProperties, or VkTextureLODGatherFormatPropertiesAMD" }, { "vuid": "VUID-VkImageFormatProperties2-sType-unique", @@ -72670,6 +73382,14 @@ } ] }, + "VkHostImageCopyDevicePerformanceQueryEXT": { + "core": [ + { + "vuid": "VUID-VkHostImageCopyDevicePerformanceQueryEXT-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT" + } + ] + }, "VkPhysicalDeviceImageViewImageFormatInfoEXT": { "core": [ { diff --git a/registry/video.xml b/registry/video.xml index 52c3f9b..11b70ca 100644 --- a/registry/video.xml +++ b/registry/video.xml @@ -16,13 +16,19 @@ The current public version of video.xml is maintained in the default branch - #include <stdint.h> + #if !defined(VK_NO_STDINT_H) + #include <stdint.h> +#endif + #include "vulkan_video_codecs_common.h" + #include "vulkan_video_codec_h264std.h" + #include "vulkan_video_codec_h265std.h" + #define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \ ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) @@ -1035,6 +1041,7 @@ The current public version of video.xml is maintained in the default branch + @@ -1104,9 +1111,9 @@ The current public version of video.xml is maintained in the default branch - + @@ -1150,12 +1157,18 @@ The current public version of video.xml is maintained in the default branch + + + + + + @@ -1192,6 +1205,7 @@ The current public version of video.xml is maintained in the default branch + diff --git a/registry/vk.xml b/registry/vk.xml index 9cafbd8..f8e93be 100644 --- a/registry/vk.xml +++ b/registry/vk.xml @@ -175,7 +175,7 @@ branch of the member gitlab server. 
#define VKSC_API_VERSION_1_0 VK_MAKE_API_VERSION(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 257 +#define VK_HEADER_VERSION 258 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) // Version of this file @@ -465,6 +465,7 @@ typedef void* MTLSharedEvent_id; typedef VkFlags VkSubmitFlags; typedef VkFlags VkImageFormatConstraintsFlagsFUCHSIA; + typedef VkFlags VkHostImageCopyFlagsEXT; typedef VkFlags VkImageConstraintsInfoFlagsFUCHSIA; typedef VkFlags VkGraphicsPipelineLibraryFlagsEXT; typedef VkFlags VkImageCompressionFlagsEXT; @@ -749,6 +750,7 @@ typedef void* MTLSharedEvent_id; + @@ -1441,6 +1443,13 @@ typedef void* MTLSharedEvent_id; VkPipeline basePipelineHandleIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is nonzero, it specifies the handle of the base pipeline this is a derivative of int32_t basePipelineIndexIf VK_PIPELINE_CREATE_DERIVATIVE_BIT is set and this value is not -1, it specifies an index into pCreateInfos of the base pipeline this is a derivative of + + VkStructureType sType + const void* pNext + VkDeviceAddress deviceAddress + VkDeviceSize size + VkDeviceAddress pipelineDeviceAddressCaptureReplay + uint32_t bindingVertex buffer binding id uint32_t strideDistance between vertices in bytes (0 = no advancement) @@ -2327,6 +2336,13 @@ typedef void* MTLSharedEvent_id; void* pNext VkBool32 deviceGeneratedCommands + + VkStructureType sType + void* pNext + VkBool32 deviceGeneratedCompute + VkBool32 deviceGeneratedComputePipelines + VkBool32 deviceGeneratedComputeCaptureReplay + VkStructureType sType const void* pNext @@ -2447,10 +2463,19 @@ typedef void* MTLSharedEvent_id; VkStructureType sType const void* pNext VkPipelineBindPoint pipelineBindPoint - VkPipeline pipeline + VkPipeline pipeline VkIndirectCommandsLayoutNV indirectCommandsLayout uint32_t maxSequencesCount + + VkStructureType sType + const void* pNext + VkPipelineBindPoint pipelineBindPoint + VkPipeline pipeline + + + VkDeviceAddress pipelineAddress + VkStructureType sType void* pNext @@ -6292,6 +6317,89 @@ typedef void* MTLSharedEvent_id; VkBool32 synchronization2 + + VkStructureType sType + void* pNext + VkBool32 hostImageCopy + + + VkStructureType sType + void* pNext + uint32_t copySrcLayoutCount + VkImageLayout* pCopySrcLayouts + uint32_t copyDstLayoutCount + VkImageLayout* pCopyDstLayouts + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE] + VkBool32 identicalMemoryTypeRequirements + + + VkStructureType sType + const void* pNext + const void* pHostPointer + uint32_t memoryRowLengthSpecified in texels + uint32_t memoryImageHeight + VkImageSubresourceLayers imageSubresource + VkOffset3D imageOffset + VkExtent3D imageExtent + + + VkStructureType sType + const void* pNext + void* pHostPointer + uint32_t memoryRowLengthSpecified in texels + uint32_t memoryImageHeight + VkImageSubresourceLayers imageSubresource + VkOffset3D imageOffset + VkExtent3D imageExtent + + + VkStructureType sType + const void* pNext + VkHostImageCopyFlagsEXT flags + VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkMemoryToImageCopyEXT* pRegions + + + VkStructureType sType + const void* pNext + VkHostImageCopyFlagsEXT flags + VkImage srcImage + VkImageLayout srcImageLayout + uint32_t regionCount + const VkImageToMemoryCopyEXT* pRegions + + + VkStructureType sType + const void* pNext + VkHostImageCopyFlagsEXT flags + VkImage srcImage + VkImageLayout srcImageLayout + 
VkImage dstImage + VkImageLayout dstImageLayout + uint32_t regionCount + const VkImageCopy2* pRegions + + + VkStructureType sType + const void* pNext + VkImage image + VkImageLayout oldLayout + VkImageLayout newLayout + VkImageSubresourceRange subresourceRange + + + VkStructureType sType + void* pNext + VkDeviceSize sizeSpecified in bytes + + + VkStructureType sType + void* pNext + VkBool32 optimalDeviceAccessSpecifies if device access is optimal + VkBool32 identicalMemoryLayoutSpecifies if memory layout is identical + VkStructureType sType void* pNext @@ -10143,6 +10251,9 @@ typedef void* MTLSharedEvent_id; + + + @@ -11291,6 +11402,12 @@ typedef void* MTLSharedEvent_id; VkBuffer buffer VkDeviceSize offset
+ + void vkCmdUpdatePipelineIndirectBuffer + VkCommandBuffer commandBuffer + VkPipelineBindPoint pipelineBindPoint + VkPipeline pipeline + void vkCmdCopyBuffer VkCommandBuffer commandBuffer @@ -13357,6 +13474,17 @@ typedef void* MTLSharedEvent_id; VkDevice device VkDeferredOperationKHR operation + + void vkGetPipelineIndirectMemoryRequirementsNV + VkDevice device + const VkComputePipelineCreateInfo* pCreateInfo + VkMemoryRequirements2* pMemoryRequirements + + + VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV + VkDevice device + const VkPipelineIndirectDeviceAddressInfoNV* pInfo + void vkCmdSetCullMode VkCommandBuffer commandBuffer @@ -13813,6 +13941,27 @@ typedef void* MTLSharedEvent_id; uint32_t* pCheckpointDataCount VkCheckpointData2NV* pCheckpointData + + VkResult vkCopyMemoryToImageEXT + VkDevice device + const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo + + + VkResult vkCopyImageToMemoryEXT + VkDevice device + const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo + + + VkResult vkCopyImageToImageEXT + VkDevice device + const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo + + + VkResult vkTransitionImageLayoutEXT + VkDevice device + uint32_t transitionCount + const VkHostImageLayoutTransitionInfoEXT* pTransitions + void vkGetCommandPoolMemoryConsumption VkDevice device @@ -19555,12 +19704,42 @@ typedef void* MTLSharedEvent_id; - + - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -21246,9 +21425,9 @@ typedef void* MTLSharedEvent_id; - + - + @@ -21517,10 +21696,23 @@ typedef void* MTLSharedEvent_id; - + - - + + + + + + + + + + + + + + + @@ -22599,6 +22791,12 @@ typedef void* MTLSharedEvent_id; + + + + + + @@ -24128,21 +24326,21 @@ typedef void* MTLSharedEvent_id; - + - + - + @@ -24159,7 +24357,7 @@ typedef void* MTLSharedEvent_id; - + @@ -24172,14 +24370,14 @@ typedef void* MTLSharedEvent_id; - + - + @@ -24337,12 +24535,12 @@ typedef void* MTLSharedEvent_id; - + - + From cb7b123f2ddc04b86fd106c3a2b2e9872e8215b5 Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Sat, 22 Jul 2023 04:48:49 -0700 Subject: [PATCH 13/17] Update for Vulkan-Docs 1.3.259 --- include/vulkan/vulkan.hpp | 12 +++++------ include/vulkan/vulkan_core.h | 8 +++---- include/vulkan/vulkan_funcs.hpp | 8 +++---- include/vulkan/vulkan_handles.hpp | 6 +++--- include/vulkan/vulkan_raii.hpp | 18 ++++++++-------- registry/validusage.json | 36 +++++++++++++++---------------- registry/video.xml | 1 - registry/vk.xml | 8 +++---- 8 files changed, 48 insertions(+), 49 deletions(-) diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp index f2e6e75..94968e9 100644 --- a/include/vulkan/vulkan.hpp +++ b/include/vulkan/vulkan.hpp @@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 258, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 259, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. 
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION @@ -5726,9 +5726,9 @@ namespace VULKAN_HPP_NAMESPACE } void - vkCmdUpdatePipelineIndirectBuffer( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdUpdatePipelineIndirectBuffer( commandBuffer, pipelineBindPoint, pipeline ); + return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); } VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT @@ -14403,7 +14403,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_generated_commands_compute === PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; - PFN_vkCmdUpdatePipelineIndirectBuffer vkCmdUpdatePipelineIndirectBuffer = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; //=== VK_EXT_extended_dynamic_state3 === @@ -15714,7 +15714,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_generated_commands_compute === vkGetPipelineIndirectMemoryRequirementsNV = PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBuffer = PFN_vkCmdUpdatePipelineIndirectBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdatePipelineIndirectBuffer" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetInstanceProcAddr( instance, "vkCmdUpdatePipelineIndirectBufferNV" ) ); vkGetPipelineIndirectDeviceAddressNV = PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectDeviceAddressNV" ) ); @@ -16703,7 +16703,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_generated_commands_compute === vkGetPipelineIndirectMemoryRequirementsNV = PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBuffer = PFN_vkCmdUpdatePipelineIndirectBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBuffer" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); vkGetPipelineIndirectDeviceAddressNV = PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); //=== VK_EXT_extended_dynamic_state3 === diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h index b635c86..50a257b 100644 --- a/include/vulkan/vulkan_core.h +++ b/include/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 258 +#define VK_HEADER_VERSION 259 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -16429,7 +16429,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV( // VK_NV_device_generated_commands_compute is a preprocessor guard. Do not pass it to API calls. 
#define VK_NV_device_generated_commands_compute 1 -#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION 1 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION 2 #define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME "VK_NV_device_generated_commands_compute" typedef struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV { VkStructureType sType; @@ -16459,7 +16459,7 @@ typedef struct VkBindPipelineIndirectCommandNV { } VkBindPipelineIndirectCommandNV; typedef void (VKAPI_PTR *PFN_vkGetPipelineIndirectMemoryRequirementsNV)(VkDevice device, const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2* pMemoryRequirements); -typedef void (VKAPI_PTR *PFN_vkCmdUpdatePipelineIndirectBuffer)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef void (VKAPI_PTR *PFN_vkCmdUpdatePipelineIndirectBufferNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetPipelineIndirectDeviceAddressNV)(VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV* pInfo); #ifndef VK_NO_PROTOTYPES @@ -16468,7 +16468,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetPipelineIndirectMemoryRequirementsNV( const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2* pMemoryRequirements); -VKAPI_ATTR void VKAPI_CALL vkCmdUpdatePipelineIndirectBuffer( +VKAPI_ATTR void VKAPI_CALL vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp index b8cee1b..f4334ea 100644 --- a/include/vulkan/vulkan_funcs.hpp +++ b/include/vulkan/vulkan_funcs.hpp @@ -21243,12 +21243,12 @@ namespace VULKAN_HPP_NAMESPACE #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::Pipeline pipeline, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdUpdatePipelineIndirectBuffer( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } template diff --git a/include/vulkan/vulkan_handles.hpp b/include/vulkan/vulkan_handles.hpp index f0545d9..9c6f0f6 100644 --- a/include/vulkan/vulkan_handles.hpp +++ b/include/vulkan/vulkan_handles.hpp @@ -5966,9 +5966,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_generated_commands_compute === template - void updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::Pipeline pipeline, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; //=== VK_EXT_extended_dynamic_state3 === diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp index df3353f..741f69d 100644 --- 
a/include/vulkan/vulkan_raii.hpp +++ b/include/vulkan/vulkan_raii.hpp @@ -1576,7 +1576,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_generated_commands_compute === vkGetPipelineIndirectMemoryRequirementsNV = PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBuffer = PFN_vkCmdUpdatePipelineIndirectBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBuffer" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); vkGetPipelineIndirectDeviceAddressNV = PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); @@ -2403,7 +2403,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_generated_commands_compute === PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; - PFN_vkCmdUpdatePipelineIndirectBuffer vkCmdUpdatePipelineIndirectBuffer = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; //=== VK_EXT_extended_dynamic_state3 === @@ -5927,8 +5927,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_generated_commands_compute === - void updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT; + void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT; //=== VK_EXT_extended_dynamic_state3 === @@ -19766,13 +19766,13 @@ namespace VULKAN_HPP_NAMESPACE return structureChain; } - VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBuffer( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, - VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdatePipelineIndirectBuffer && - "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV && + "Function requires " ); - getDispatcher()->vkCmdUpdatePipelineIndirectBuffer( + getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV( static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } diff --git a/registry/validusage.json b/registry/validusage.json index f549b77..90e0d67 100644 --- a/registry/validusage.json +++ b/registry/validusage.json @@ -1,9 +1,9 @@ { "version info": { "schema version": 2, - "api version": "1.3.258", - "comment": "from git branch: github-main commit: 2c1e07a72b43f53cc7866c55ae32afe97d933621", - "date": "2023-07-21 09:57:58Z" + "api version": "1.3.259", + "comment": "from git branch: github-main commit: 3da7531f2f9d48993ab627c02a866479d5163ba4", + "date": "2023-07-22 10:52:33Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -11570,54 +11570,54 @@ } ] }, - "vkCmdUpdatePipelineIndirectBuffer": { + "vkCmdUpdatePipelineIndirectBufferNV": { "core": [ { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipelineBindPoint-09018", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-pipelineBindPoint-09018", "text": " pipelineBindPoint must be 
VK_PIPELINE_BIND_POINT_COMPUTE" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipeline-09019", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-pipeline-09019", "text": " pipeline must have been created with VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV flag set" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipeline-09020", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-pipeline-09020", "text": " pipeline must have been created with VkComputePipelineIndirectBufferInfoNV structure specifying a valid address where its metadata will be saved" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-deviceGeneratedComputePipelines-09021", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-deviceGeneratedComputePipelines-09021", "text": " The VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputePipelines feature must be enabled" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-commandBuffer-parameter", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipelineBindPoint-parameter", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-pipelineBindPoint-parameter", "text": " pipelineBindPoint must be a valid VkPipelineBindPoint value" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-pipeline-parameter", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-pipeline-parameter", "text": " pipeline must be a valid VkPipeline handle" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-commandBuffer-recording", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-commandBuffer-recording", "text": " commandBuffer must be in the recording state" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-commandBuffer-cmdpool", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-commandBuffer-cmdpool", "text": " The VkCommandPool that commandBuffer was allocated from must support transfer, graphics, or compute operations" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-renderpass", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-renderpass", "text": " This command must only be called outside of a render pass instance" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-videocoding", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-videocoding", "text": " This command must only be called outside of a video coding scope" }, { - "vuid": "VUID-vkCmdUpdatePipelineIndirectBuffer-commonparent", + "vuid": "VUID-vkCmdUpdatePipelineIndirectBufferNV-commonparent", "text": " Both of commandBuffer, and pipeline must have been created, allocated, or retrieved from the same VkDevice" } ] @@ -58638,7 +58638,7 @@ }, { "vuid": "VUID-VkBindPipelineIndirectCommandNV-None-09093", - "text": " The referenced pipeline must have been updated with vkCmdUpdatePipelineIndirectBuffer" + "text": " The referenced pipeline must have been updated with vkCmdUpdatePipelineIndirectBufferNV" }, { "vuid": "VUID-VkBindPipelineIndirectCommandNV-None-09094", @@ -60202,7 +60202,7 @@ }, { "vuid": "VUID-VkGeneratedCommandsInfoNV-pipelineBindPoint-09086", - "text": " If pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_COMPUTE, then vkCmdUpdatePipelineIndirectBuffer must have been called on that pipeline to save its metadata to a device address" + "text": " If pipelineBindPoint is of type VK_PIPELINE_BIND_POINT_COMPUTE, then vkCmdUpdatePipelineIndirectBufferNV must have been called on that pipeline to save its metadata to a 
device address" }, { "vuid": "VUID-VkGeneratedCommandsInfoNV-pipelineBindPoint-09087", diff --git a/registry/video.xml b/registry/video.xml index 11b70ca..bb534c3 100644 --- a/registry/video.xml +++ b/registry/video.xml @@ -1167,7 +1167,6 @@ The current public version of video.xml is maintained in the default branch - diff --git a/registry/vk.xml b/registry/vk.xml index f8e93be..5488ca7 100644 --- a/registry/vk.xml +++ b/registry/vk.xml @@ -175,7 +175,7 @@ branch of the member gitlab server. #define VKSC_API_VERSION_1_0 VK_MAKE_API_VERSION(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 258 +#define VK_HEADER_VERSION 259 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) // Version of this file @@ -11403,7 +11403,7 @@ typedef void* MTLSharedEvent_id; VkDeviceSize offset - void vkCmdUpdatePipelineIndirectBuffer + void vkCmdUpdatePipelineIndirectBufferNV VkCommandBuffer commandBuffer VkPipelineBindPoint pipelineBindPoint VkPipeline pipeline @@ -21698,7 +21698,7 @@ typedef void* MTLSharedEvent_id; - + @@ -21711,7 +21711,7 @@ typedef void* MTLSharedEvent_id; - + From 94bb3c998b9156b9101421f7614617dfcf7f4256 Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Fri, 28 Jul 2023 04:40:18 -0700 Subject: [PATCH 14/17] Update for Vulkan-Docs 1.3.260 --- include/vulkan/vulkan.cppm | 157 +- include/vulkan/vulkan.hpp | 414 +++- include/vulkan/vulkan_beta.h | 113 + include/vulkan/vulkan_core.h | 222 +- include/vulkan/vulkan_enums.hpp | 193 +- .../vulkan/vulkan_extension_inspection.hpp | 20 +- include/vulkan/vulkan_format_traits.hpp | 54 + include/vulkan/vulkan_funcs.hpp | 465 +++- include/vulkan/vulkan_handles.hpp | 214 +- include/vulkan/vulkan_hash.hpp | 247 +- include/vulkan/vulkan_raii.hpp | 383 ++- include/vulkan/vulkan_static_assertions.hpp | 118 +- include/vulkan/vulkan_structs.hpp | 2054 +++++++++++++-- include/vulkan/vulkan_to_string.hpp | 252 +- registry/genvk.py | 2 + registry/parse_dependency.py | 161 +- registry/validusage.json | 2196 +++++++++++++++-- registry/vk.xml | 502 +++- 18 files changed, 7020 insertions(+), 747 deletions(-) diff --git a/include/vulkan/vulkan.cppm b/include/vulkan/vulkan.cppm index e5bd813..cc693b8 100644 --- a/include/vulkan/vulkan.cppm +++ b/include/vulkan/vulkan.cppm @@ -800,6 +800,12 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV; using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV; + //=== VK_KHR_maintenance5 === + using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR; + //=== VK_EXT_shader_object === using VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT; using VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT; @@ -888,50 +894,99 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ResultValue; using VULKAN_HPP_NAMESPACE::ResultValueType; - //========================================= - //=== CONSTEXPR CONSTANTs AND FUNCTIONs === - //========================================= + //=========================== + //=== CONSTEXPR CONSTANTs === + //=========================== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::AttachmentUnused; + using VULKAN_HPP_NAMESPACE::False; + using VULKAN_HPP_NAMESPACE::LodClampNone; + using VULKAN_HPP_NAMESPACE::MaxDescriptionSize; + using 
VULKAN_HPP_NAMESPACE::MaxExtensionNameSize; + using VULKAN_HPP_NAMESPACE::MaxMemoryHeaps; + using VULKAN_HPP_NAMESPACE::MaxMemoryTypes; + using VULKAN_HPP_NAMESPACE::MaxPhysicalDeviceNameSize; + using VULKAN_HPP_NAMESPACE::QueueFamilyIgnored; + using VULKAN_HPP_NAMESPACE::RemainingArrayLayers; + using VULKAN_HPP_NAMESPACE::RemainingMipLevels; + using VULKAN_HPP_NAMESPACE::SubpassExternal; + using VULKAN_HPP_NAMESPACE::True; + using VULKAN_HPP_NAMESPACE::UuidSize; + using VULKAN_HPP_NAMESPACE::WholeSize; + + //=== VK_VERSION_1_1 === + using VULKAN_HPP_NAMESPACE::LuidSize; + using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSize; + using VULKAN_HPP_NAMESPACE::QueueFamilyExternal; + + //=== VK_VERSION_1_2 === + using VULKAN_HPP_NAMESPACE::MaxDriverInfoSize; + using VULKAN_HPP_NAMESPACE::MaxDriverNameSize; + + //=== VK_KHR_device_group_creation === + using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSizeKhr; + + //=== VK_KHR_external_memory_capabilities === + using VULKAN_HPP_NAMESPACE::LuidSizeKhr; + + //=== VK_KHR_external_memory === + using VULKAN_HPP_NAMESPACE::QueueFamilyExternalKhr; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + using VULKAN_HPP_NAMESPACE::ShaderIndexUnusedAmdx; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_ray_tracing_pipeline === + using VULKAN_HPP_NAMESPACE::ShaderUnusedKhr; + + //=== VK_NV_ray_tracing === + using VULKAN_HPP_NAMESPACE::ShaderUnusedNv; + + //=== VK_KHR_global_priority === + using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKhr; + + //=== VK_KHR_driver_properties === + using VULKAN_HPP_NAMESPACE::MaxDriverInfoSizeKhr; + using VULKAN_HPP_NAMESPACE::MaxDriverNameSizeKhr; + + //=== VK_EXT_global_priority_query === + using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeExt; + + //=== VK_EXT_image_sliced_view_of_3d === + using VULKAN_HPP_NAMESPACE::Remaining3DSlicesExt; + + //=== VK_EXT_shader_module_identifier === + using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeExt; + + //======================== + //=== CONSTEXPR VALUEs === + //======================== + using VULKAN_HPP_NAMESPACE::HeaderVersion; + + //========================= + //=== CONSTEXPR CALLEEs === + //========================= + using VULKAN_HPP_NAMESPACE::apiVersionMajor; + using VULKAN_HPP_NAMESPACE::apiVersionMinor; + using VULKAN_HPP_NAMESPACE::apiVersionPatch; + using VULKAN_HPP_NAMESPACE::apiVersionVariant; + using VULKAN_HPP_NAMESPACE::makeApiVersion; + using VULKAN_HPP_NAMESPACE::makeVersion; + using VULKAN_HPP_NAMESPACE::versionMajor; + using VULKAN_HPP_NAMESPACE::versionMinor; + using VULKAN_HPP_NAMESPACE::versionPatch; + + //========================== + //=== CONSTEXPR CALLERSs === + //========================== using VULKAN_HPP_NAMESPACE::ApiVersion; using VULKAN_HPP_NAMESPACE::ApiVersion10; using VULKAN_HPP_NAMESPACE::ApiVersion11; using VULKAN_HPP_NAMESPACE::ApiVersion12; using VULKAN_HPP_NAMESPACE::ApiVersion13; - using VULKAN_HPP_NAMESPACE::apiVersionMajor; - using VULKAN_HPP_NAMESPACE::apiVersionMinor; - using VULKAN_HPP_NAMESPACE::apiVersionPatch; - using VULKAN_HPP_NAMESPACE::apiVersionVariant; - using VULKAN_HPP_NAMESPACE::AttachmentUnused; - using VULKAN_HPP_NAMESPACE::False; - using VULKAN_HPP_NAMESPACE::HeaderVersion; using VULKAN_HPP_NAMESPACE::HeaderVersionComplete; - using VULKAN_HPP_NAMESPACE::LodClampNone; - using VULKAN_HPP_NAMESPACE::LuidSize; - using VULKAN_HPP_NAMESPACE::makeApiVersion; - using VULKAN_HPP_NAMESPACE::makeVersion; - using VULKAN_HPP_NAMESPACE::MaxDescriptionSize; - using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSize; - 
using VULKAN_HPP_NAMESPACE::MaxDriverInfoSize; - using VULKAN_HPP_NAMESPACE::MaxDriverNameSize; - using VULKAN_HPP_NAMESPACE::MaxExtensionNameSize; - using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKhr; - using VULKAN_HPP_NAMESPACE::MaxMemoryHeaps; - using VULKAN_HPP_NAMESPACE::MaxMemoryTypes; - using VULKAN_HPP_NAMESPACE::MaxPhysicalDeviceNameSize; - using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeExt; - using VULKAN_HPP_NAMESPACE::QueueFamilyExternal; - using VULKAN_HPP_NAMESPACE::QueueFamilyForeignExt; - using VULKAN_HPP_NAMESPACE::QueueFamilyIgnored; - using VULKAN_HPP_NAMESPACE::Remaining3DSlicesExt; - using VULKAN_HPP_NAMESPACE::RemainingArrayLayers; - using VULKAN_HPP_NAMESPACE::RemainingMipLevels; - using VULKAN_HPP_NAMESPACE::ShaderUnusedKhr; - using VULKAN_HPP_NAMESPACE::SubpassExternal; - using VULKAN_HPP_NAMESPACE::True; - using VULKAN_HPP_NAMESPACE::UuidSize; - using VULKAN_HPP_NAMESPACE::versionMajor; - using VULKAN_HPP_NAMESPACE::versionMinor; - using VULKAN_HPP_NAMESPACE::versionPatch; - using VULKAN_HPP_NAMESPACE::WholeSize; //=============== //=== STRUCTs === @@ -1752,6 +1807,18 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX; + using VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX; + using VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX; + using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX; + using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX; + using VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === using VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT; using VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT; @@ -2070,13 +2137,11 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT; using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT; using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT; - using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT; using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT; using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT; using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT; - using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT; //=== VK_KHR_map_memory2 === using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR; @@ -2521,6 +2586,18 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_pipeline_protected_access === using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT; + //=== VK_KHR_maintenance5 === + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR; + using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT; + using VULKAN_HPP_NAMESPACE::ImageSubresource2KHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT; + using 
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR; + //=== VK_KHR_ray_tracing_position_fetch === using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR; diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp index 94968e9..8f46fc8 100644 --- a/include/vulkan/vulkan.hpp +++ b/include/vulkan/vulkan.hpp @@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 259, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 260, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION @@ -3991,6 +3991,59 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); + } + + VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex ); + } + + void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, scratch ); + } + + void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, countInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT @@ -4937,8 +4990,8 @@ namespace VULKAN_HPP_NAMESPACE void vkGetImageSubresourceLayout2EXT( VkDevice device, VkImage image, - const VkImageSubresource2EXT * pSubresource, - VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT + const VkImageSubresource2KHR * pSubresource, + VkSubresourceLayout2KHR * pLayout ) const 
VULKAN_HPP_NOEXCEPT { return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); } @@ -5964,6 +6017,36 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); } + //=== VK_KHR_maintenance5 === + + void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); + } + + void vkGetRenderingAreaGranularityKHR( VkDevice device, + const VkRenderingAreaInfoKHR * pRenderingAreaInfo, + VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); + } + + void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, + const VkDeviceImageSubresourceInfoKHR * pInfo, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); + } + + void vkGetImageSubresourceLayout2KHR( VkDevice device, + VkImage image, + const VkImageSubresource2KHR * pSubresource, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); + } + //=== VK_EXT_shader_object === VkResult vkCreateShadersEXT( VkDevice device, @@ -6920,35 +7003,80 @@ namespace VULKAN_HPP_NAMESPACE #endif } - //========================================= - //=== CONSTEXPR CONSTANTs AND FUNCTIONs === - //========================================= - VULKAN_HPP_CONSTEXPR_INLINE uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t False = VK_FALSE; - VULKAN_HPP_CONSTEXPR_INLINE float LodClampNone = VK_LOD_CLAMP_NONE; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSize = VK_LUID_SIZE; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; + //=========================== + //=== CONSTEXPR CONSTANTs === + //=========================== + + //=== VK_VERSION_1_0 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t AttachmentUnused = VK_ATTACHMENT_UNUSED; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t False = VK_FALSE; + VULKAN_HPP_CONSTEXPR_INLINE float LodClampNone = VK_LOD_CLAMP_NONE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t True = VK_TRUE; + VULKAN_HPP_CONSTEXPR_INLINE uint64_t WholeSize = VK_WHOLE_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryTypes = VK_MAX_MEMORY_TYPES; + 
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPhysicalDeviceNameSize = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t UuidSize = VK_UUID_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxExtensionNameSize = VK_MAX_EXTENSION_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDescriptionSize = VK_MAX_DESCRIPTION_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxMemoryHeaps = VK_MAX_MEMORY_HEAPS; + + //=== VK_VERSION_1_1 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSize = VK_MAX_DEVICE_GROUP_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSize = VK_LUID_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL; + + //=== VK_VERSION_1_2 === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSize = VK_MAX_DRIVER_NAME_SIZE; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSize = VK_MAX_DRIVER_INFO_SIZE; + + //=== VK_KHR_device_group_creation === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDeviceGroupSizeKhr = VK_MAX_DEVICE_GROUP_SIZE_KHR; + + //=== VK_KHR_external_memory_capabilities === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t LuidSizeKhr = VK_LUID_SIZE_KHR; + + //=== VK_KHR_external_memory === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternalKhr = VK_QUEUE_FAMILY_EXTERNAL_KHR; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderIndexUnusedAmdx = VK_SHADER_INDEX_UNUSED_AMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_KHR_ray_tracing_pipeline === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR; + + //=== VK_NV_ray_tracing === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedNv = VK_SHADER_UNUSED_NV; + + //=== VK_KHR_global_priority === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeKhr = VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; + + //=== VK_KHR_driver_properties === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverNameSizeKhr = VK_MAX_DRIVER_NAME_SIZE_KHR; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxDriverInfoSizeKhr = VK_MAX_DRIVER_INFO_SIZE_KHR; + + //=== VK_EXT_global_priority_query === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySizeExt = VK_MAX_GLOBAL_PRIORITY_SIZE_EXT; + + //=== VK_EXT_image_sliced_view_of_3d === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT; + + //=== VK_EXT_shader_module_identifier === VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeExt = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyExternal = VK_QUEUE_FAMILY_EXTERNAL; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyForeignExt = VK_QUEUE_FAMILY_FOREIGN_EXT; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t QueueFamilyIgnored = VK_QUEUE_FAMILY_IGNORED; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t Remaining3DSlicesExt = VK_REMAINING_3D_SLICES_EXT; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingArrayLayers = VK_REMAINING_ARRAY_LAYERS; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t RemainingMipLevels = VK_REMAINING_MIP_LEVELS; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t ShaderUnusedKhr = VK_SHADER_UNUSED_KHR; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t SubpassExternal = VK_SUBPASS_EXTERNAL; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t True = VK_TRUE; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t UuidSize = VK_UUID_SIZE; - VULKAN_HPP_CONSTEXPR_INLINE uint64_t WholeSize = VK_WHOLE_SIZE; - VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION; + + //======================== + //=== CONSTEXPR VALUEs === + //======================== + 
VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION; + + //========================= + //=== CONSTEXPR CALLEEs === + //========================= template ::value>::type> VULKAN_HPP_CONSTEXPR uint32_t apiVersionMajor( T const version ) { @@ -6998,6 +7126,10 @@ namespace VULKAN_HPP_NAMESPACE { return ( ( uint32_t )(version)&0xFFFU ); } + + //========================= + //=== CONSTEXPR CALLERs === + //========================= VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion = makeApiVersion( 0, 1, 0, 0 ); VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion10 = makeApiVersion( 0, 1, 0, 0 ); VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion11 = makeApiVersion( 0, 1, 1, 0 ); @@ -7932,6 +8064,16 @@ namespace VULKAN_HPP_NAMESPACE value = true }; }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ template <> struct StructExtends { @@ -9375,6 +9517,42 @@ namespace VULKAN_HPP_NAMESPACE }; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === template <> struct StructExtends @@ -9820,6 +9998,16 @@ namespace VULKAN_HPP_NAMESPACE value = true }; }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct StructExtends + { + enum + { + value = true + }; + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_AMD_shader_core_properties === template <> @@ -10679,7 +10867,7 @@ namespace VULKAN_HPP_NAMESPACE }; }; template <> - struct StructExtends + struct StructExtends { enum { @@ -11755,7 +11943,7 @@ namespace VULKAN_HPP_NAMESPACE }; }; template <> - struct StructExtends + struct StructExtends { enum { @@ -12941,6 +13129,96 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_KHR_maintenance5 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_KHR_ray_tracing_position_fetch === template <> struct StructExtends @@ -13966,6 +14244,25 @@ namespace VULKAN_HPP_NAMESPACE PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + 
PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +#else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; @@ -14450,6 +14747,12 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + //=== VK_KHR_maintenance5 === + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + //=== VK_EXT_shader_object === PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; @@ -15145,6 +15448,20 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); vkGetPhysicalDeviceMultisamplePropertiesEXT = @@ -15449,6 +15766,8 @@ namespace VULKAN_HPP_NAMESPACE vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); 
vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; //=== VK_KHR_map_memory2 === vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); @@ -15769,6 +16088,13 @@ namespace VULKAN_HPP_NAMESPACE vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) ); vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) ); + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) ); + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) ); + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) ); + //=== VK_EXT_shader_object === vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) ); vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) ); @@ -16202,6 +16528,20 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); @@ -16466,6 +16806,8 @@ namespace VULKAN_HPP_NAMESPACE vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); 
vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; //=== VK_KHR_map_memory2 === vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); @@ -16754,6 +17096,12 @@ namespace VULKAN_HPP_NAMESPACE vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + //=== VK_EXT_shader_object === vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); diff --git a/include/vulkan/vulkan_beta.h b/include/vulkan/vulkan_beta.h index 75fabd4..f00ed3f 100644 --- a/include/vulkan/vulkan_beta.h +++ b/include/vulkan/vulkan_beta.h @@ -643,6 +643,119 @@ typedef struct VkVideoEncodeH265GopRemainingFrameInfoEXT { +// VK_AMDX_shader_enqueue is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMDX_shader_enqueue 1 +#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 1 +#define VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME "VK_AMDX_shader_enqueue" +#define VK_SHADER_INDEX_UNUSED_AMDX (~0U) +typedef struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX { + VkStructureType sType; + void* pNext; + VkBool32 shaderEnqueue; +} VkPhysicalDeviceShaderEnqueueFeaturesAMDX; + +typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX { + VkStructureType sType; + void* pNext; + uint32_t maxExecutionGraphDepth; + uint32_t maxExecutionGraphShaderOutputNodes; + uint32_t maxExecutionGraphShaderPayloadSize; + uint32_t maxExecutionGraphShaderPayloadCount; + uint32_t executionGraphDispatchAddressAlignment; +} VkPhysicalDeviceShaderEnqueuePropertiesAMDX; + +typedef struct VkExecutionGraphPipelineScratchSizeAMDX { + VkStructureType sType; + void* pNext; + VkDeviceSize size; +} VkExecutionGraphPipelineScratchSizeAMDX; + +typedef struct VkExecutionGraphPipelineCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkExecutionGraphPipelineCreateInfoAMDX; + +typedef union VkDeviceOrHostAddressConstAMDX { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstAMDX; + +typedef struct VkDispatchGraphInfoAMDX { + uint32_t nodeIndex; + uint32_t payloadCount; + VkDeviceOrHostAddressConstAMDX payloads; + uint64_t payloadStride; +} VkDispatchGraphInfoAMDX; + +typedef struct VkDispatchGraphCountInfoAMDX { + uint32_t count; + VkDeviceOrHostAddressConstAMDX infos; + uint64_t stride; +} VkDispatchGraphCountInfoAMDX; + +typedef struct VkPipelineShaderStageNodeCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + const char* pName; + uint32_t index; +} VkPipelineShaderStageNodeCreateInfoAMDX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateExecutionGraphPipelinesAMDX)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex); +typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateExecutionGraphPipelinesAMDX( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + 
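/*
 * Editor's note -- illustrative sketch only, not part of vulkan_beta.h or of this
 * patch (the remaining VK_AMDX_shader_enqueue prototypes continue below). It shows
 * one plausible way the entry points and structs declared in this block could be
 * used together. The function name and every handle passed in (device, command
 * buffer, pipeline layout, shader stages, payload, and the scratch device address,
 * which is assumed to come from a buffer created with
 * VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX and sized from the query below)
 * are assumptions, and error handling is omitted. VK_ENABLE_BETA_EXTENSIONS must be
 * defined before including <vulkan/vulkan.h> for these declarations to be visible.
 */
#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.h>

static void recordExecutionGraphDispatch( VkDevice                                device,
                                          VkCommandBuffer                         cmd,
                                          VkPipelineLayout                        layout,
                                          uint32_t                                stageCount,
                                          const VkPipelineShaderStageCreateInfo * stages,
                                          VkDeviceAddress                         scratch,
                                          const void *                            payload,
                                          uint64_t                                payloadStride )
{
    /* Build an execution graph pipeline from the supplied node shader stages. */
    VkExecutionGraphPipelineCreateInfoAMDX createInfo = { 0 };
    createInfo.sType      = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX;
    createInfo.stageCount = stageCount;
    createInfo.pStages    = stages;
    createInfo.layout     = layout;

    VkPipeline graph = VK_NULL_HANDLE;
    vkCreateExecutionGraphPipelinesAMDX( device, VK_NULL_HANDLE, 1, &createInfo, NULL, &graph );

    /* Query the required scratch size; the buffer behind `scratch` is assumed to be
     * at least this large. */
    VkExecutionGraphPipelineScratchSizeAMDX scratchSize = { 0 };
    scratchSize.sType = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX;
    vkGetExecutionGraphPipelineScratchSizeAMDX( device, graph, &scratchSize );

    /* Bind the graph, initialize its scratch memory once, then dispatch a single
     * node with one host-provided payload. */
    vkCmdBindPipeline( cmd, VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX, graph );
    vkCmdInitializeGraphScratchMemoryAMDX( cmd, scratch );

    VkDispatchGraphInfoAMDX node = { 0 };
    node.nodeIndex            = 0; /* typically looked up via vkGetExecutionGraphPipelineNodeIndexAMDX */
    node.payloadCount         = 1;
    node.payloads.hostAddress = payload;
    node.payloadStride        = payloadStride;

    VkDispatchGraphCountInfoAMDX countInfo = { 0 };
    countInfo.count             = 1;
    countInfo.infos.hostAddress = &node;
    countInfo.stride            = sizeof( node );

    vkCmdDispatchGraphAMDX( cmd, scratch, &countInfo );
}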
+VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineScratchSizeAMDX( + VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineNodeIndexAMDX( + VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, + uint32_t* pNodeIndex); + +VKAPI_ATTR void VKAPI_CALL vkCmdInitializeGraphScratchMemoryAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectCountAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceAddress countInfo); +#endif + + // VK_NV_displacement_micromap is a preprocessor guard. Do not pass it to API calls. #define VK_NV_displacement_micromap 1 #define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 2 diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h index 50a257b..78b0da7 100644 --- a/include/vulkan/vulkan_core.h +++ b/include/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 259 +#define VK_HEADER_VERSION 260 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -646,6 +646,21 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID = 1000129006, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX = 1000134000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX = 1000134001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX = 1000134002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX = 1000134003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004, +#endif VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, @@ -937,8 +952,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT = 1000338000, VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT = 1000338001, - VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = 1000338002, - VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = 1000338003, VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 1000338004, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000, @@ -1065,6 +1078,14 @@ typedef enum VkStructureType { 
VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR = 1000470000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR = 1000470001, + VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR = 1000470003, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR = 1000470004, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR = 1000338002, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR = 1000338003, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR = 1000470005, + VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR = 1000470006, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR = 1000481000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT = 1000482000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT = 1000482001, @@ -1255,6 +1276,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, @@ -1652,6 +1675,8 @@ typedef enum VkFormat { VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, VK_FORMAT_R16G16_S10_5_NV = 1000464000, + VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR = 1000470000, + VK_FORMAT_A8_UNORM_KHR = 1000470001, VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK, VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK, @@ -2111,6 +2136,9 @@ typedef enum VkAttachmentStoreOp { typedef enum VkPipelineBindPoint { VK_PIPELINE_BIND_POINT_GRAPHICS = 0, VK_PIPELINE_BIND_POINT_COMPUTE = 1, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX = 1000134000, +#endif VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000, VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI = 1000369003, VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, @@ -2499,6 +2527,9 @@ typedef enum VkBufferUsageFlagBits { VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000, +#endif VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000, VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000, VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400, @@ -10399,6 +10430,171 @@ VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR( 
#endif +// VK_KHR_maintenance5 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance5 1 +#define VK_KHR_MAINTENANCE_5_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_5_EXTENSION_NAME "VK_KHR_maintenance5" +typedef VkFlags64 VkPipelineCreateFlags2KHR; + +// Flag bits for VkPipelineCreateFlagBits2KHR +typedef VkFlags64 VkPipelineCreateFlagBits2KHR; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RESERVED_BIT_28_NV = 0x10000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_KHR = 0x00000020ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR = 0x00000040ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR = 0x00000100ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR = 0x00000200ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_KHR = 0x00000400ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_KHR = 0x00800000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR = 0x00000800ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_KHR = 0x00040000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_KHR = 0x00100000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_KHR = 0x00400000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_KHR = 0x01000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR = 0x02000000ULL; +static const VkPipelineCreateFlagBits2KHR 
VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR = 0x04000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_KHR = 0x08000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_KHR = 0x40000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_KHR = 0x20000000ULL; + +typedef VkFlags64 VkBufferUsageFlags2KHR; + +// Flag bits for VkBufferUsageFlagBits2KHR +typedef VkFlags64 VkBufferUsageFlagBits2KHR; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR = 0x00000001ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR = 0x00000002ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000004ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR = 0x00000020ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_KHR = 0x00000200ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RAY_TRACING_BIT_KHR = 0x00000400ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_KHR = 0x00000800ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_KHR = 0x00001000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR = 0x00004000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000ULL; +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000ULL; +#endif +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_KHR = 0x00200000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_KHR = 0x00400000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_KHR = 0x04000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00800000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_KHR = 0x01000000ULL; + +typedef struct VkPhysicalDeviceMaintenance5FeaturesKHR { + VkStructureType sType; + void* 
pNext; + VkBool32 maintenance5; +} VkPhysicalDeviceMaintenance5FeaturesKHR; + +typedef struct VkPhysicalDeviceMaintenance5PropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; + VkBool32 depthStencilSwizzleOneSupport; + VkBool32 polygonModePointSize; + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; + VkBool32 nonStrictWideLinesUseParallelogram; +} VkPhysicalDeviceMaintenance5PropertiesKHR; + +typedef struct VkRenderingAreaInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkRenderingAreaInfoKHR; + +typedef struct VkImageSubresource2KHR { + VkStructureType sType; + void* pNext; + VkImageSubresource imageSubresource; +} VkImageSubresource2KHR; + +typedef struct VkDeviceImageSubresourceInfoKHR { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + const VkImageSubresource2KHR* pSubresource; +} VkDeviceImageSubresourceInfoKHR; + +typedef struct VkSubresourceLayout2KHR { + VkStructureType sType; + void* pNext; + VkSubresourceLayout subresourceLayout; +} VkSubresourceLayout2KHR; + +typedef struct VkPipelineCreateFlags2CreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags2KHR flags; +} VkPipelineCreateFlags2CreateInfoKHR; + +typedef struct VkBufferUsageFlags2CreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBufferUsageFlags2KHR usage; +} VkBufferUsageFlags2CreateInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2KHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularityKHR)(VkDevice device, const VkRenderingAreaInfoKHR* pRenderingAreaInfo, VkExtent2D* pGranularity); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayoutKHR)(VkDevice device, const VkDeviceImageSubresourceInfoKHR* pInfo, VkSubresourceLayout2KHR* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2KHR)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2KHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularityKHR( + VkDevice device, + const VkRenderingAreaInfoKHR* pRenderingAreaInfo, + VkExtent2D* pGranularity); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayoutKHR( + VkDevice device, + const VkDeviceImageSubresourceInfoKHR* pInfo, + VkSubresourceLayout2KHR* pLayout); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2KHR( + VkDevice device, + VkImage image, + const VkImageSubresource2KHR* pSubresource, + VkSubresourceLayout2KHR* pLayout); +#endif + + // VK_KHR_ray_tracing_position_fetch is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_ray_tracing_position_fetch 1 #define VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION 1 @@ -13993,23 +14189,15 @@ typedef struct VkHostImageCopyDevicePerformanceQueryEXT { VkBool32 identicalMemoryLayout; } VkHostImageCopyDevicePerformanceQueryEXT; -typedef struct VkSubresourceLayout2EXT { - VkStructureType sType; - void* pNext; - VkSubresourceLayout subresourceLayout; -} VkSubresourceLayout2EXT; +typedef VkSubresourceLayout2KHR VkSubresourceLayout2EXT; -typedef struct VkImageSubresource2EXT { - VkStructureType sType; - void* pNext; - VkImageSubresource imageSubresource; -} VkImageSubresource2EXT; +typedef VkImageSubresource2KHR VkImageSubresource2EXT; typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo); typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo); typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo); typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT* pTransitions); -typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT( @@ -14032,8 +14220,8 @@ VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT( VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( VkDevice device, VkImage image, - const VkImageSubresource2EXT* pSubresource, - VkSubresourceLayout2EXT* pLayout); + const VkImageSubresource2KHR* pSubresource, + VkSubresourceLayout2KHR* pLayout); #endif @@ -14945,7 +15133,7 @@ typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT { typedef struct VkGraphicsPipelineLibraryCreateInfoEXT { VkStructureType sType; - void* pNext; + const void* pNext; VkGraphicsPipelineLibraryFlagsEXT flags; } VkGraphicsPipelineLibraryCreateInfoEXT; diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp index 27cb036..effa298 100644 --- a/include/vulkan/vulkan_enums.hpp +++ b/include/vulkan/vulkan_enums.hpp @@ -577,8 +577,15 @@ namespace VULKAN_HPP_NAMESPACE eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID, eAndroidHardwareBufferFormatProperties2ANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID, #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, - eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, + ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT, + eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + ePhysicalDeviceShaderEnqueueFeaturesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX, + ePhysicalDeviceShaderEnqueuePropertiesAMDX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX, + eExecutionGraphPipelineScratchSizeAMDX = 
VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX, + eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX, + ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT, ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT, eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT, @@ -1090,6 +1097,14 @@ namespace VULKAN_HPP_NAMESPACE eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV, ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT, ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, + ePhysicalDeviceMaintenance5FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR, + ePhysicalDeviceMaintenance5PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR, + eRenderingAreaInfoKHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR, + eDeviceImageSubresourceInfoKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR, + eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, + eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, + ePipelineCreateFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR, + eBufferUsageFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR, ePhysicalDeviceRayTracingPositionFetchFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR, ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT, ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT, @@ -1500,7 +1515,9 @@ namespace VULKAN_HPP_NAMESPACE eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT, eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT, eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT, - eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV + eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV, + eA1B5G5R5UnormPack16KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR, + eA8UnormKHR = VK_FORMAT_A8_UNORM_KHR }; enum class FormatFeatureFlagBits : VkFormatFeatureFlags @@ -2115,21 +2132,24 @@ namespace VULKAN_HPP_NAMESPACE enum class BufferUsageFlagBits : VkBufferUsageFlags { - eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, - eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, - eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, - eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, - eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, - eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, - eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, - eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, - eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, - eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, - eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR, - eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR, - eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, - eTransformFeedbackCounterBufferEXT = 
VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, - eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, + eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, @@ -2158,8 +2178,11 @@ namespace VULKAN_HPP_NAMESPACE BufferUsageFlagBits::eStorageTexelBuffer | BufferUsageFlagBits::eUniformBuffer | BufferUsageFlagBits::eStorageBuffer | BufferUsageFlagBits::eIndexBuffer | BufferUsageFlagBits::eVertexBuffer | BufferUsageFlagBits::eIndirectBuffer | BufferUsageFlagBits::eShaderDeviceAddress | BufferUsageFlagBits::eVideoDecodeSrcKHR | BufferUsageFlagBits::eVideoDecodeDstKHR | BufferUsageFlagBits::eTransformFeedbackBufferEXT | - BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits::eConditionalRenderingEXT | - BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR | + BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits::eConditionalRenderingEXT +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | BufferUsageFlagBits::eExecutionGraphScratchAMDX +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR | BufferUsageFlagBits::eAccelerationStructureStorageKHR | BufferUsageFlagBits::eShaderBindingTableKHR #if defined( VK_ENABLE_BETA_EXTENSIONS ) | BufferUsageFlagBits::eVideoEncodeDstKHR | BufferUsageFlagBits::eVideoEncodeSrcKHR @@ -3046,8 +3069,11 @@ namespace VULKAN_HPP_NAMESPACE enum class PipelineBindPoint { - eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, - eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, + eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphAMDX = VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV, eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI @@ -6608,6 +6634,129 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints; }; + 
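// Editor's note -- illustrative sketch only, not part of vulkan_enums.hpp or of this
// patch: ahead of the C++ mirror of VK_KHR_maintenance5 that follows, this shows how
// the C-level structs and commands added earlier in this change might be used. The
// function name and the device, cmd, image and indexBuffer handles are assumptions;
// error handling, memory binding and the tiling/aspect requirements of the layout
// query are omitted.
#include <vulkan/vulkan.h>

static void maintenance5Examples( VkDevice device, VkCommandBuffer cmd, VkImage image, VkBuffer indexBuffer )
{
  // 64-bit buffer usage: chain VkBufferUsageFlags2CreateInfoKHR into VkBufferCreateInfo;
  // when present, the chained usage is used in place of VkBufferCreateInfo::usage.
  VkBufferUsageFlags2CreateInfoKHR usage2 = { 0 };
  usage2.sType = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR;
  usage2.usage = VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR | VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR;

  VkBufferCreateInfo bufferInfo = { 0 };
  bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
  bufferInfo.pNext = &usage2;
  bufferInfo.size  = 65536;

  VkBuffer buffer = VK_NULL_HANDLE;
  vkCreateBuffer( device, &bufferInfo, NULL, &buffer );

  // vkCmdBindIndexBuffer2KHR adds an explicit size parameter over vkCmdBindIndexBuffer;
  // VK_WHOLE_SIZE binds from the offset to the end of the buffer.
  vkCmdBindIndexBuffer2KHR( cmd, indexBuffer, 0, VK_WHOLE_SIZE, VK_INDEX_TYPE_UINT32 );

  // Query a subresource layout through the KHR-suffixed structs that this change
  // aliases with the former EXT equivalents.
  VkImageSubresource2KHR subresource = { 0 };
  subresource.sType                       = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR;
  subresource.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;

  VkSubresourceLayout2KHR layout = { 0 };
  layout.sType = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR;
  vkGetImageSubresourceLayout2KHR( device, image, &subresource, &layout );
}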
//=== VK_KHR_maintenance5 === + + enum class PipelineCreateFlagBits2KHR : VkPipelineCreateFlags2KHR + { + eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR, + eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR, + eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eReserved28NV = VK_PIPELINE_CREATE_2_RESERVED_BIT_28_NV, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, + eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR, + eDeferCompile = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_KHR, + eCaptureStatistics = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentations = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR, + eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR, + eLinkTimeOptimization = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_KHR, + eRetainLinkTimeOptimizationInfo = VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_KHR, + eLibrary = VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR, + eRayTracingSkipTriangles = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipAabbs = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eRayTracingNoNullAnyHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingShaderGroupHandleCaptureReplay = VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, + eIndirectBindable = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_KHR, + eRayTracingAllowMotion = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_KHR, + eRenderingFragmentShadingRateAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eRenderingFragmentDensityMapAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_KHR, + eRayTracingOpacityMicromap = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_KHR, + eColorAttachmentFeedbackLoop = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR, + eDepthStencilAttachmentFeedbackLoop = VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR, + eNoProtectedAccess = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_KHR, + eProtectedAccessOnly = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_KHR, + eDescriptorBuffer = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_KHR + }; + + using PipelineCreateFlags2KHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2KHR allFlags = + PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | PipelineCreateFlagBits2KHR::eReserved28NV +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompile | + PipelineCreateFlagBits2KHR::eCaptureStatistics | 
PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations | + PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure | + PipelineCreateFlagBits2KHR::eLinkTimeOptimization | PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo | PipelineCreateFlagBits2KHR::eLibrary | + PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs | + PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders | + PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders | + PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay | PipelineCreateFlagBits2KHR::eIndirectBindable | + PipelineCreateFlagBits2KHR::eRayTracingAllowMotion | PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment | + PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment | PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap | + PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop | PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop | + PipelineCreateFlagBits2KHR::eNoProtectedAccess | PipelineCreateFlagBits2KHR::eProtectedAccessOnly | PipelineCreateFlagBits2KHR::eDescriptorBuffer; + }; + + enum class BufferUsageFlagBits2KHR : VkBufferUsageFlags2KHR + { + eTransferSrc = VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR, + eTransferDst = VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR, + eUniformTexelBuffer = VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR, + eStorageTexelBuffer = VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR, + eUniformBuffer = VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR, + eStorageBuffer = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR, + eIndexBuffer = VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR, + eVertexBuffer = VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR, + eIndirectBuffer = VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eConditionalRendering = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_KHR, + eShaderBindingTable = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR, + eRayTracing = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_KHR, + eTransformFeedbackBuffer = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_KHR, + eTransformFeedbackCounterBuffer = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_KHR, + eVideoDecodeSrc = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDst = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eVideoEncodeDst = VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrc = VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eShaderDeviceAddress = VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR, + eAccelerationStructureBuildInputReadOnly = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, + eAccelerationStructureStorage = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, + eSamplerDescriptorBuffer = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_KHR, + eResourceDescriptorBuffer = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_KHR, + ePushDescriptorsDescriptorBuffer = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_KHR, + eMicromapBuildInputReadOnly = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_KHR, + eMicromapStorage = 
VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_KHR + }; + + using BufferUsageFlags2KHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags2KHR allFlags = + BufferUsageFlagBits2KHR::eTransferSrc | BufferUsageFlagBits2KHR::eTransferDst | BufferUsageFlagBits2KHR::eUniformTexelBuffer | + BufferUsageFlagBits2KHR::eStorageTexelBuffer | BufferUsageFlagBits2KHR::eUniformBuffer | BufferUsageFlagBits2KHR::eStorageBuffer | + BufferUsageFlagBits2KHR::eIndexBuffer | BufferUsageFlagBits2KHR::eVertexBuffer | BufferUsageFlagBits2KHR::eIndirectBuffer +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | BufferUsageFlagBits2KHR::eConditionalRendering | BufferUsageFlagBits2KHR::eShaderBindingTable | BufferUsageFlagBits2KHR::eTransformFeedbackBuffer | + BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer | BufferUsageFlagBits2KHR::eVideoDecodeSrc | BufferUsageFlagBits2KHR::eVideoDecodeDst +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | BufferUsageFlagBits2KHR::eVideoEncodeDst | BufferUsageFlagBits2KHR::eVideoEncodeSrc +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | BufferUsageFlagBits2KHR::eShaderDeviceAddress | BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly | + BufferUsageFlagBits2KHR::eAccelerationStructureStorage | BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer | + BufferUsageFlagBits2KHR::eResourceDescriptorBuffer | BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer | + BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly | BufferUsageFlagBits2KHR::eMicromapStorage; + }; + //=== VK_EXT_shader_object === enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT diff --git a/include/vulkan/vulkan_extension_inspection.hpp b/include/vulkan/vulkan_extension_inspection.hpp index 5084b29..548a32f 100644 --- a/include/vulkan/vulkan_extension_inspection.hpp +++ b/include/vulkan/vulkan_extension_inspection.hpp @@ -164,7 +164,10 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ "VK_EXT_sampler_filter_minmax", "VK_KHR_storage_buffer_storage_class", -"VK_AMD_gpu_shader_int16", +"VK_AMD_gpu_shader_int16", +#if defined( VK_ENABLE_BETA_EXTENSIONS ) +"VK_AMDX_shader_enqueue", +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ "VK_AMD_mixed_attachment_samples", "VK_AMD_shader_fragment_mask", "VK_EXT_inline_uniform_block", @@ -379,6 +382,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_NV_optical_flow", "VK_EXT_legacy_dithering", "VK_EXT_pipeline_protected_access", +"VK_KHR_maintenance5", "VK_KHR_ray_tracing_position_fetch", "VK_EXT_shader_object", "VK_QCOM_tile_properties", @@ -579,7 +583,10 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_USE_PLATFORM_ANDROID_KHR ) { "VK_ANDROID_external_memory_android_hardware_buffer", { { "VK_VERSION_1_0", { { "VK_KHR_sampler_ycbcr_conversion", "VK_KHR_external_memory", "VK_EXT_queue_family_foreign", "VK_KHR_dedicated_allocation", } } } } }, #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ -{ "VK_EXT_sampler_filter_minmax", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } }, +{ "VK_EXT_sampler_filter_minmax", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } }, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) +{ "VK_AMDX_shader_enqueue", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_synchronization2", "VK_KHR_pipeline_library", "VK_KHR_spirv_1_4", } } } } }, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ { 
"VK_EXT_inline_uniform_block", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_maintenance1", } } } } }, { "VK_EXT_sample_locations", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } }, { "VK_EXT_blend_operation_advanced", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } }, @@ -774,6 +781,7 @@ namespace VULKAN_HPP_NAMESPACE { "VK_NV_optical_flow", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_format_feature_flags2", "VK_KHR_synchronization2", } } } } }, { "VK_EXT_legacy_dithering", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } }, { "VK_EXT_pipeline_protected_access", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } }, +{ "VK_KHR_maintenance5", { { "VK_VERSION_1_1", { { "VK_KHR_dynamic_rendering", } } } } }, { "VK_KHR_ray_tracing_position_fetch", { { "VK_VERSION_1_0", { { "VK_KHR_acceleration_structure", } } } } }, { "VK_EXT_shader_object", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", "VK_KHR_dynamic_rendering", } } }, { "VK_VERSION_1_1", { { "VK_KHR_dynamic_rendering", } } }, { "VK_VERSION_1_3", { { } } } } }, { "VK_QCOM_tile_properties", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } } } }, @@ -1386,7 +1394,11 @@ namespace VULKAN_HPP_NAMESPACE || ( extension == "VK_ANDROID_external_memory_android_hardware_buffer" ) #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ || ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) || - ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) || + ( extension == "VK_AMD_gpu_shader_int16" ) +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + || ( extension == "VK_AMDX_shader_enqueue" ) +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) || ( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_EXT_shader_stencil_export" ) || ( extension == "VK_EXT_sample_locations" ) || ( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) || ( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_EXT_blend_operation_advanced" ) || @@ -1493,7 +1505,7 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_EXT_external_memory_acquire_unmodified" ) || ( extension == "VK_EXT_extended_dynamic_state3" ) || ( extension == "VK_EXT_subpass_merge_feedback" ) || ( extension == "VK_EXT_shader_module_identifier" ) || ( extension == "VK_EXT_rasterization_order_attachment_access" ) || ( extension == "VK_NV_optical_flow" ) || - ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" ) || + ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" ) || ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_KHR_ray_tracing_position_fetch" ) || ( extension == "VK_EXT_shader_object" ) || ( extension == "VK_QCOM_tile_properties" ) || ( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) || ( extension == "VK_NV_ray_tracing_invocation_reorder" ) || ( extension == "VK_EXT_mutable_descriptor_type" ) || diff --git a/include/vulkan/vulkan_format_traits.hpp b/include/vulkan/vulkan_format_traits.hpp index 
7d72bbe..16cbabb 100644 --- a/include/vulkan/vulkan_format_traits.hpp +++ b/include/vulkan/vulkan_format_traits.hpp @@ -362,6 +362,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; default: VULKAN_HPP_ASSERT( false ); return 0; } @@ -619,6 +621,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP"; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP"; case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return "8-bit alpha"; default: VULKAN_HPP_ASSERT( false ); return ""; } @@ -2007,6 +2011,21 @@ namespace VULKAN_HPP_NAMESPACE case 1: return 16; default: VULKAN_HPP_ASSERT( false ); return 0; } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: + switch ( component ) + { + case 0: return 1; + case 1: return 5; + case 2: return 5; + case 3: return 5; + default: VULKAN_HPP_ASSERT( false ); return 0; + } + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: + switch ( component ) + { + case 0: return 8; + default: VULKAN_HPP_ASSERT( false ); return 0; + } default: return 0; } @@ -2264,6 +2283,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; default: return 0; } @@ -4284,6 +4305,21 @@ namespace VULKAN_HPP_NAMESPACE case 1: return "G"; default: VULKAN_HPP_ASSERT( false ); return ""; } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: + switch ( component ) + { + case 0: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } default: return ""; } @@ -6304,6 +6340,21 @@ namespace VULKAN_HPP_NAMESPACE case 1: return "SINT"; default: VULKAN_HPP_ASSERT( false ); return ""; } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } default: return ""; } @@ -6744,6 +6795,7 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16; case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16; case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 16; default: return 0; } @@ -7605,6 +7657,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1; case 
VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1; case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; default: VULKAN_HPP_ASSERT( false ); return 0; } diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp index f4334ea..1ab8736 100644 --- a/include/vulkan/vulkan_funcs.hpp +++ b/include/vulkan/vulkan_funcs.hpp @@ -13584,6 +13584,318 @@ namespace VULKAN_HPP_NAMESPACE # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateExecutionGraphPipelinesAMDX( m_device, + static_cast( pipelineCache ), + createInfoCount, + reinterpret_cast( pCreateInfos ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pPipelines ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( static_cast( result ), pipelines ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size(), pipelineAllocator ); + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( static_cast( result ), pipelines ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue + Device::createExecutionGraphPipelineAMDX( 
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue( static_cast( result ), pipeline ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines; + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( + static_cast( result ), std::move( uniquePipelines ) ); + } + + template >::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> + Device::createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + std::vector pipelines( createInfos.size() ); + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( pipelines.data() ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); + uniquePipelines.reserve( createInfos.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipeline : pipelines ) + { + uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); + } + return ResultValue, PipelineAllocator>>( + static_cast( result ), std::move( uniquePipelines ) ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + 
Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Pipeline pipeline; + VkResult result = d.vkCreateExecutionGraphPipelinesAMDX( + m_device, + static_cast( pipelineCache ), + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &pipeline ) ); + resultCheck( static_cast( result ), + VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + + return ResultValue>( + static_cast( result ), + UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetExecutionGraphPipelineScratchSizeAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( pSizeInfo ) ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; + VkResult result = d.vkGetExecutionGraphPipelineScratchSizeAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( &sizeInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); + + return createResultValueType( static_cast( result ), sizeInfo ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetExecutionGraphPipelineNodeIndexAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( pNodeInfo ), pNodeIndex ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::getExecutionGraphPipelineNodeIndexAMDX( + VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + uint32_t nodeIndex; + VkResult result = d.vkGetExecutionGraphPipelineNodeIndexAMDX( + m_device, static_cast( executionGraph ), reinterpret_cast( &nodeInfo ), &nodeIndex ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); + + 
return createResultValueType( static_cast( result ), nodeIndex ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast( scratch ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast( scratch ), reinterpret_cast( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast( scratch ), reinterpret_cast( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphIndirectAMDX( + m_commandBuffer, static_cast( scratch ), reinterpret_cast( pCountInfo ) ); + } + +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + d.vkCmdDispatchGraphIndirectAMDX( + m_commandBuffer, static_cast( scratch ), reinterpret_cast( &countInfo ) ); + } +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast( scratch ), static_cast( countInfo ) ); + } +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === template @@ -18104,45 +18416,45 @@ namespace VULKAN_HPP_NAMESPACE template VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetImageSubresourceLayout2EXT( m_device, static_cast( image ), - reinterpret_cast( pSubresource ), - reinterpret_cast( pLayout ) ); + reinterpret_cast( pSubresource ), + 
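// Illustrative sketch (not part of the generated headers) for the VK_AMDX_shader_enqueue
// entry points defined above; the extension is provisional and only compiled under
// VK_ENABLE_BETA_EXTENSIONS. Only the query/initialize calls are exercised here:
// scratch-buffer allocation, pipeline binding and the actual vkCmdDispatchGraphAMDX
// recording are omitted, the node name "main" with index 0 is a placeholder for whatever
// the execution graph defines, and the default dispatcher is assumed to be initialized
// for this device.
#include <cstdint>
#include <vulkan/vulkan.hpp>

#if defined( VK_ENABLE_BETA_EXTENSIONS )
static void prepareExecutionGraph( vk::Device        device,
                                   vk::CommandBuffer cmd,
                                   vk::Pipeline      executionGraph,
                                   vk::DeviceAddress scratch )
{
  // Scratch memory the graph needs; the application has to back `scratch` with a
  // buffer of at least this size before recording the dispatch.
  vk::ExecutionGraphPipelineScratchSizeAMDX scratchSize = device.getExecutionGraphPipelineScratchSizeAMDX( executionGraph );
  (void)scratchSize.size;

  // Map a shader-declared node name/index pair to its runtime node index.
  vk::PipelineShaderStageNodeCreateInfoAMDX nodeInfo{};
  nodeInfo.pName = "main";
  nodeInfo.index = 0;
  uint32_t nodeIndex = device.getExecutionGraphPipelineNodeIndexAMDX( executionGraph, nodeInfo );
  (void)nodeIndex;

  // One-time initialization of the scratch memory before dispatching the graph.
  cmd.initializeGraphScratchMemoryAMDX( scratch );
}
#endif /*VK_ENABLE_BETA_EXTENSIONS*/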
reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2EXT( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout; + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; d.vkGetImageSubresourceLayout2EXT( m_device, static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return layout; } template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2EXT( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get(); + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); d.vkGetImageSubresourceLayout2EXT( m_device, static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return structureChain; } @@ -21935,6 +22247,133 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_maintenance5 === + + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer2KHR( m_commandBuffer, + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); + } + + template + VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderingAreaGranularityKHR( + m_device, reinterpret_cast( pRenderingAreaInfo ), reinterpret_cast( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + d.vkGetRenderingAreaGranularityKHR( + m_device, reinterpret_cast( 
&renderingAreaInfo ), reinterpret_cast( &granularity ) ); + + return granularity; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSubresourceLayoutKHR( + m_device, reinterpret_cast( pInfo ), reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; + d.vkGetDeviceImageSubresourceLayoutKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + + return layout; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); + d.vkGetDeviceImageSubresourceLayoutKHR( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout2KHR( m_device, + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2KHR( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; + d.vkGetImageSubresourceLayout2KHR( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2KHR( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); + d.vkGetImageSubresourceLayout2KHR( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return 
structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_shader_object === template diff --git a/include/vulkan/vulkan_handles.hpp b/include/vulkan/vulkan_handles.hpp index 9c6f0f6..9e7304f 100644 --- a/include/vulkan/vulkan_handles.hpp +++ b/include/vulkan/vulkan_handles.hpp @@ -829,6 +829,18 @@ namespace VULKAN_HPP_NAMESPACE struct AndroidHardwareBufferFormatProperties2ANDROID; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + struct PhysicalDeviceShaderEnqueueFeaturesAMDX; + struct PhysicalDeviceShaderEnqueuePropertiesAMDX; + struct ExecutionGraphPipelineScratchSizeAMDX; + struct ExecutionGraphPipelineCreateInfoAMDX; + struct DispatchGraphInfoAMDX; + struct DispatchGraphCountInfoAMDX; + struct PipelineShaderStageNodeCreateInfoAMDX; + union DeviceOrHostAddressConstAMDX; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === struct SampleLocationEXT; struct SampleLocationsInfoEXT; @@ -1152,8 +1164,6 @@ namespace VULKAN_HPP_NAMESPACE struct HostImageLayoutTransitionInfoEXT; struct SubresourceHostMemcpySizeEXT; struct HostImageCopyDevicePerformanceQueryEXT; - struct SubresourceLayout2EXT; - struct ImageSubresource2EXT; //=== VK_KHR_map_memory2 === struct MemoryMapInfoKHR; @@ -1598,6 +1608,18 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_pipeline_protected_access === struct PhysicalDevicePipelineProtectedAccessFeaturesEXT; + //=== VK_KHR_maintenance5 === + struct PhysicalDeviceMaintenance5FeaturesKHR; + struct PhysicalDeviceMaintenance5PropertiesKHR; + struct RenderingAreaInfoKHR; + struct DeviceImageSubresourceInfoKHR; + struct ImageSubresource2KHR; + using ImageSubresource2EXT = ImageSubresource2KHR; + struct SubresourceLayout2KHR; + using SubresourceLayout2EXT = SubresourceLayout2KHR; + struct PipelineCreateFlags2CreateInfoKHR; + struct BufferUsageFlags2CreateInfoKHR; + //=== VK_KHR_ray_tracing_position_fetch === struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR; @@ -5081,6 +5103,41 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + template + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, + Dispatch const & d 
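// Illustrative sketch (not part of the generated headers) for the VK_KHR_maintenance5
// functions defined and declared above. The buffer, index count, color format and image
// create-info are placeholders, and the default dispatcher is assumed to be initialized
// for this device. Note that the former ImageSubresource2EXT / SubresourceLayout2EXT
// names remain available as aliases of the KHR structs, so existing
// VK_EXT_image_compression_control / VK_EXT_host_image_copy code keeps compiling.
#include <cstdint>
#include <vulkan/vulkan.hpp>

// Bind only the first `indexCount` 16-bit indices; the explicit size argument is what
// vkCmdBindIndexBuffer2KHR adds over vkCmdBindIndexBuffer.
static void bindIndexRange( vk::CommandBuffer cmd, vk::Buffer indexBuffer, uint32_t indexCount )
{
  cmd.bindIndexBuffer2KHR( indexBuffer, 0, indexCount * sizeof( uint16_t ), vk::IndexType::eUint16 );
}

// Query the optimal render-area granularity for a single-color-attachment
// dynamic-rendering setup.
static vk::Extent2D queryRenderingAreaGranularity( vk::Device device )
{
  vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
  vk::RenderingAreaInfoKHR areaInfo{};
  areaInfo.colorAttachmentCount    = 1;
  areaInfo.pColorAttachmentFormats = &colorFormat;
  return device.getRenderingAreaGranularityKHR( areaInfo );
}

// Query a subresource layout from an ImageCreateInfo alone, without creating the image.
static vk::SubresourceLayout2KHR queryLayoutWithoutImage( vk::Device device, vk::ImageCreateInfo const & imageCreateInfo )
{
  vk::ImageSubresource2KHR subresource{};
  subresource.imageSubresource = vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 );

  vk::DeviceImageSubresourceInfoKHR info{};
  info.pCreateInfo  = &imageCreateInfo;
  info.pSubresource = &subresource;
  return device.getImageSubresourceLayoutKHR( info );
}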
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === template @@ -6161,6 +6218,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_maintenance5 === + + template + void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_shader_object === template @@ -10860,6 +10926,94 @@ namespace VULKAN_HPP_NAMESPACE # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + template + VULKAN_HPP_NODISCARD Result createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + uint32_t createInfoCount, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename B0 = PipelineAllocator, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue + createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename B0 = PipelineAllocator, + typename std::enable_if>::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> + createExecutionGraphPipelinesAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache 
pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + Optional allocator, + PipelineAllocator & pipelineAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template + VULKAN_HPP_NODISCARD ResultValue> createExecutionGraphPipelineAMDXUnique( + VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, + const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result + getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_KHR_get_memory_requirements2 === template @@ -11944,19 +12098,19 @@ namespace VULKAN_HPP_NAMESPACE template void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT * pLayout, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12793,6 +12947,52 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + //=== VK_KHR_maintenance5 === + + template + void getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_shader_object === template diff --git a/include/vulkan/vulkan_hash.hpp b/include/vulkan/vulkan_hash.hpp index f7305e7..d194824 100644 --- a/include/vulkan/vulkan_hash.hpp +++ b/include/vulkan/vulkan_hash.hpp @@ -1921,6 +1921,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR const & bufferUsageFlags2CreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.usage ); + return seed; + } + }; + template <> struct hash { @@ -3743,6 +3756,33 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR const & imageSubresource2KHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + 
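// Illustrative sketch (not part of the generated headers) for chaining the new
// BufferUsageFlags2CreateInfoKHR, whose std::hash specialization appears above, onto a
// BufferCreateInfo. The 64-bit flag type vk::BufferUsageFlags2KHR and the rule that the
// legacy `usage` field is ignored when this struct is chained come from the
// VK_KHR_maintenance5 specification rather than from the hunks shown here.
#include <vulkan/vulkan.hpp>

static vk::Buffer createBufferWithUsage2( vk::Device device, vk::DeviceSize size, vk::BufferUsageFlags2KHR usage2 )
{
  vk::StructureChain<vk::BufferCreateInfo, vk::BufferUsageFlags2CreateInfoKHR> chain;
  auto & createInfo      = chain.get<vk::BufferCreateInfo>();
  createInfo.size        = size;
  createInfo.sharingMode = vk::SharingMode::eExclusive;
  chain.get<vk::BufferUsageFlags2CreateInfoKHR>().usage = usage2;
  return device.createBuffer( chain.get<vk::BufferCreateInfo>() );
}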
VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.imageSubresource ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR const & deviceImageSubresourceInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pCreateInfo ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pSubresource ); + return seed; + } + }; + template <> struct hash { @@ -4241,6 +4281,57 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries ); + return seed; + } + }; + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & executionGraphPipelineCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.flags ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.stageCount ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pStages ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.pLibraryInfo ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.layout ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineHandle ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineCreateInfoAMDX.basePipelineIndex ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX const & executionGraphPipelineScratchSizeAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.size ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> struct hash { @@ -5631,19 +5722,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT const & imageSubresource2EXT ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2EXT.imageSubresource ); - return seed; - } - }; - template <> struct hash { @@ -8976,6 +9054,39 @@ namespace std } }; + template <> 
+ struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR const & physicalDeviceMaintenance5FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.maintenance5 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR const & physicalDeviceMaintenance5PropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentMultisampleCoverageAfterSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentSampleMaskTestBeforeSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.depthStencilSwizzleOneSupport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.polygonModePointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictSinglePixelWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictWideLinesUseParallelogram ); + return seed; + } + }; + template <> struct hash { @@ -10335,6 +10446,42 @@ namespace std } }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX const & physicalDeviceShaderEnqueueFeaturesAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderEnqueue ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX const & physicalDeviceShaderEnqueuePropertiesAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphDepth ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderOutputNodes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.executionGraphDispatchAddressAlignment ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> struct hash { @@ -11543,6 +11690,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR const & pipelineCreateFlags2CreateInfoKHR ) const 
VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.flags ); + return seed; + } + }; + template <> struct hash { @@ -11741,20 +11901,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const & pipelineLibraryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.libraryCount ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineLibraryCreateInfoKHR.pLibraries ); - return seed; - } - }; - template <> struct hash { @@ -11941,6 +12087,26 @@ namespace std } }; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX const & pipelineShaderStageNodeCreateInfoAMDX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.pNext ); + for ( const char * p = pipelineShaderStageNodeCreateInfoAMDX.pName; *p != '\0'; ++p ) + { + VULKAN_HPP_HASH_COMBINE( seed, *p ); + } + VULKAN_HPP_HASH_COMBINE( seed, pipelineShaderStageNodeCreateInfoAMDX.index ); + return seed; + } + }; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + template <> struct hash { @@ -12868,6 +13034,23 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR const & renderingAreaInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pColorAttachmentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.depthAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.stencilAttachmentFormat ); + return seed; + } + }; + template <> struct hash { @@ -13579,14 +13762,14 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT const & subresourceLayout2EXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR const & subresourceLayout2KHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2EXT.subresourceLayout ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.subresourceLayout ); return seed; } }; diff --git a/include/vulkan/vulkan_raii.hpp b/include/vulkan/vulkan_raii.hpp index 741f69d..f065519 100644 --- a/include/vulkan/vulkan_raii.hpp +++ b/include/vulkan/vulkan_raii.hpp @@ -1068,6 +1068,20 @@ namespace VULKAN_HPP_NAMESPACE 
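// The vulkan_hash.hpp hunks above add std::hash specializations for the new
// maintenance5 / shader-enqueue structs (and rename the former EXT ones), so the structs
// can be hashed directly. Minimal sketch; what the application does with the hash value
// is up to it.
#include <cstddef>
#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_hash.hpp>

static std::size_t hashSubresourceLayout( vk::SubresourceLayout2KHR const & layout )
{
  return std::hash<vk::SubresourceLayout2KHR>{}( layout );
}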
PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); @@ -1337,6 +1351,8 @@ namespace VULKAN_HPP_NAMESPACE vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; //=== VK_KHR_map_memory2 === vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); @@ -1629,6 +1645,13 @@ namespace VULKAN_HPP_NAMESPACE vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + //=== VK_EXT_shader_object === vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); @@ -2030,6 +2053,25 @@ namespace VULKAN_HPP_NAMESPACE PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + 
PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +# else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; @@ -2449,6 +2491,12 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; + //=== VK_KHR_maintenance5 === + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + //=== VK_EXT_shader_object === PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; @@ -3836,6 +3884,20 @@ namespace VULKAN_HPP_NAMESPACE getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VULKAN_HPP_NODISCARD std::vector createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_KHR_get_memory_requirements2 === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 @@ -4262,6 +4324,18 @@ namespace VULKAN_HPP_NAMESPACE createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + //=== VK_KHR_maintenance5 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayoutKHR( const 
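// The RAII dispatcher hunks above fetch the new VK_KHR_maintenance5 (and, under the beta
// guard, VK_AMDX_shader_enqueue) entry points with vkGetDeviceProcAddr; pointers the
// driver does not provide stay null. Minimal sketch of checking availability before
// calling through the RAII wrappers.
#include <vulkan/vulkan_raii.hpp>

static bool hasSubresourceLayout2KHR( vk::raii::Device const & device )
{
  return device.getDispatcher()->vkGetImageSubresourceLayout2KHR != nullptr;
}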
VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_shader_object === VULKAN_HPP_NODISCARD std::vector @@ -5548,6 +5622,21 @@ namespace VULKAN_HPP_NAMESPACE void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT; + + void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; + + void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; + + void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT; @@ -6010,6 +6099,13 @@ namespace VULKAN_HPP_NAMESPACE void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_maintenance5 === + + void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_shader_object === void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, @@ -8152,12 +8248,21 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_host_image_copy === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT - getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain - getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT; + getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; + + //=== VK_KHR_maintenance5 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; private: VULKAN_HPP_NAMESPACE::Device m_device = {}; @@ -8916,6 +9021,30 @@ namespace VULKAN_HPP_NAMESPACE } } +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( *device ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + 
{ + m_constructorSuccessCode = static_cast( + getDispatcher()->vkCreateExecutionGraphPipelinesAMDX( static_cast( *device ), + pipelineCache ? static_cast( **pipelineCache ) : 0, + 1, + reinterpret_cast( &createInfo ), + reinterpret_cast( m_allocator ), + reinterpret_cast( &m_pipeline ) ) ); + if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && + ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + detail::throwResultException( m_constructorSuccessCode, "vkCreateExecutionGraphPipelinesAMDX" ); + } + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, @@ -9087,6 +9216,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const; +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX getExecutionGraphScratchSizeAMDX() const; + + VULKAN_HPP_NODISCARD uint32_t getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const; +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_KHR_ray_tracing_pipeline === template @@ -9154,6 +9291,36 @@ namespace VULKAN_HPP_NAMESPACE } } +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher(); + std::vector pipelines( createInfos.size() ); + VULKAN_HPP_NAMESPACE::Result result = static_cast( dispatcher->vkCreateExecutionGraphPipelinesAMDX( + static_cast( *device ), + pipelineCache ? 
static_cast( **pipelineCache ) : 0, + createInfos.size(), + reinterpret_cast( createInfos.data() ), + reinterpret_cast( static_cast( allocator ) ), + pipelines.data() ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) ) + { + this->reserve( createInfos.size() ); + for ( auto const & pipeline : pipelines ) + { + this->emplace_back( device, pipeline, allocator, result ); + } + } + else + { + detail::throwResultException( result, "vkCreateExecutionGraphPipelinesAMDX" ); + } + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -16286,6 +16453,95 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::createExecutionGraphPipelinesAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX Pipeline::getExecutionGraphScratchSizeAMDX() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; + VkResult result = getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX( + static_cast( m_device ), static_cast( m_pipeline ), reinterpret_cast( &sizeInfo ) ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" ); + + return sizeInfo; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t + Pipeline::getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX && + "Function requires " ); + + uint32_t nodeIndex; + VkResult result = + getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast( m_device ), + static_cast( m_pipeline ), + reinterpret_cast( &nodeInfo ), + &nodeIndex ); + resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" ); + + return nodeIndex; + } + + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX && + "Function requires " ); + + getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast( m_commandBuffer ), static_cast( scratch ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const 
VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphAMDX && "Function requires " ); + + getDispatcher()->vkCmdDispatchGraphAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + reinterpret_cast( &countInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function requires " ); + + getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + reinterpret_cast( &countInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX && + "Function requires " ); + + getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX( + static_cast( m_commandBuffer ), static_cast( scratch ), static_cast( countInfo ) ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === VULKAN_HPP_INLINE void @@ -18236,34 +18492,36 @@ namespace VULKAN_HPP_NAMESPACE resultCheck( static_cast( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT - Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && - "Function requires or " ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2EXT && + "Function requires or or " ); - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout; + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return layout; } template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain - Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource ) const VULKAN_HPP_NOEXCEPT + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && - "Function requires or " ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2EXT && + "Function requires or or " ); VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get(); + VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return structureChain; } @@ -20185,6 +20443,99 
@@ namespace VULKAN_HPP_NAMESPACE
                                                  reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
    }
 
+    //=== VK_KHR_maintenance5 ===
+
+    VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
+                                                               VULKAN_HPP_NAMESPACE::DeviceSize offset,
+                                                               VULKAN_HPP_NAMESPACE::DeviceSize size,
+                                                               VULKAN_HPP_NAMESPACE::IndexType  indexType ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && "Function requires <VK_KHR_maintenance5>" );
+
+      getDispatcher()->vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                                 static_cast<VkBuffer>( buffer ),
+                                                 static_cast<VkDeviceSize>( offset ),
+                                                 static_cast<VkDeviceSize>( size ),
+                                                 static_cast<VkIndexType>( indexType ) );
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
+      Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && "Function requires <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::Extent2D granularity;
+      getDispatcher()->vkGetRenderingAreaGranularityKHR( static_cast<VkDevice>( m_device ),
+                                                         reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ),
+                                                         reinterpret_cast<VkExtent2D *>( &granularity ) );
+
+      return granularity;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+      Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
+                         "Function requires <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+      getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
+                                                             reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
+                                                             reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+      return layout;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
+                         "Function requires <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR &    layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+      getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
+                                                             reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
+                                                             reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+      return structureChain;
+    }
+
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
+      Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkGetImageSubresourceLayout2KHR &&
+        "Function requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
+      getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
+                                                        static_cast<VkImage>( m_image ),
+                                                        reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
+                                                        reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
+
+      return layout;
+    }
+
+    template <typename X, typename Y, typename... Z>
+    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
+      Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(
+        getDispatcher()->vkGetImageSubresourceLayout2KHR &&
+        "Function requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
+
+      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
+      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR &    layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
+      getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
+                                                        static_cast<VkImage>( m_image ),
+
reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + //=== VK_EXT_shader_object === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector diff --git a/include/vulkan/vulkan_static_assertions.hpp b/include/vulkan/vulkan_static_assertions.hpp index ba79591..1fb34db 100644 --- a/include/vulkan/vulkan_static_assertions.hpp +++ b/include/vulkan/vulkan_static_assertions.hpp @@ -3220,6 +3220,63 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderEnqueueFeaturesAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueuePropertiesAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderEnqueuePropertiesAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX ) == sizeof( VkExecutionGraphPipelineScratchSizeAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExecutionGraphPipelineScratchSizeAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX ) == sizeof( VkExecutionGraphPipelineCreateInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ExecutionGraphPipelineCreateInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX ) == sizeof( VkDispatchGraphInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DispatchGraphInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX ) == sizeof( VkDispatchGraphCountInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DispatchGraphCountInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX ) == sizeof( VkPipelineShaderStageNodeCreateInfoAMDX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineShaderStageNodeCreateInfoAMDX is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX ) == sizeof( VkDeviceOrHostAddressConstAMDX ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceOrHostAddressConstAMDX is not nothrow_move_constructible!" ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_EXT_sample_locations === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); @@ -4623,17 +4680,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "HostImageCopyDevicePerformanceQueryEXT is not nothrow_move_constructible!" ); -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT ) == sizeof( VkSubresourceLayout2EXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "SubresourceLayout2EXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2EXT ) == sizeof( VkImageSubresource2EXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "ImageSubresource2EXT is not nothrow_move_constructible!" ); - //=== VK_KHR_map_memory2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR ) == sizeof( VkMemoryMapInfoKHR ), "struct and wrapper have different size!" ); @@ -6567,6 +6613,56 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_KHR_maintenance5 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance5FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance5FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance5PropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance5PropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR ) == sizeof( VkRenderingAreaInfoKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingAreaInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR ) == sizeof( VkDeviceImageSubresourceInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceImageSubresourceInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR ) == sizeof( VkImageSubresource2KHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSubresource2KHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR ) == sizeof( VkSubresourceLayout2KHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceLayout2KHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR ) == sizeof( VkPipelineCreateFlags2CreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCreateFlags2CreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR ) == sizeof( VkBufferUsageFlags2CreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferUsageFlags2CreateInfoKHR is not nothrow_move_constructible!" 
); + //=== VK_KHR_ray_tracing_position_fetch === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR ) == diff --git a/include/vulkan/vulkan_structs.hpp b/include/vulkan/vulkan_structs.hpp index 97dbee4..8ffe8f2 100644 --- a/include/vulkan/vulkan_structs.hpp +++ b/include/vulkan/vulkan_structs.hpp @@ -12512,6 +12512,103 @@ namespace VULKAN_HPP_NAMESPACE }; using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; + struct BufferUsageFlags2CreateInfoKHR + { + using NativeType = VkBufferUsageFlags2CreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferUsageFlags2CreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , usage( usage_ ) + { + } + + VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfoKHR( BufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BufferUsageFlags2CreateInfoKHR( VkBufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferUsageFlags2CreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + BufferUsageFlags2CreateInfoKHR & operator=( BufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BufferUsageFlags2CreateInfoKHR & operator=( VkBufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfoKHR & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBufferUsageFlags2CreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferUsageFlags2CreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, usage ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BufferUsageFlags2CreateInfoKHR const & ) const = default; +#else + bool operator==( BufferUsageFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); +# endif + } + + bool operator!=( BufferUsageFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferUsageFlags2CreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage = {}; + }; + + template <> + struct CppType + { + using Type = BufferUsageFlags2CreateInfoKHR; + }; + struct BufferViewCreateInfo { using NativeType = VkBufferViewCreateInfo; @@ -28425,6 +28522,213 @@ namespace VULKAN_HPP_NAMESPACE }; using 
DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; + struct ImageSubresource2KHR + { + using NativeType = VkImageSubresource2KHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2KHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ImageSubresource2KHR( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , imageSubresource( imageSubresource_ ) + { + } + + VULKAN_HPP_CONSTEXPR ImageSubresource2KHR( ImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ImageSubresource2KHR( VkImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ImageSubresource2KHR( *reinterpret_cast( &rhs ) ) + { + } + + ImageSubresource2KHR & operator=( ImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ImageSubresource2KHR & operator=( VkImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2KHR & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + { + imageSubresource = imageSubresource_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkImageSubresource2KHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageSubresource ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageSubresource2KHR const & ) const = default; +#else + bool operator==( ImageSubresource2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource ); +# endif + } + + bool operator!=( ImageSubresource2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2KHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {}; + }; + + template <> + struct CppType + { + using Type = ImageSubresource2KHR; + }; + using ImageSubresource2EXT = ImageSubresource2KHR; + + struct DeviceImageSubresourceInfoKHR + { + using NativeType = VkDeviceImageSubresourceInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageSubresourceInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfoKHR( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pCreateInfo( pCreateInfo_ ) + , pSubresource( pSubresource_ ) + { + } + + 
VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfoKHR( DeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceImageSubresourceInfoKHR( VkDeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceImageSubresourceInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + DeviceImageSubresourceInfoKHR & operator=( DeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceImageSubresourceInfoKHR & operator=( VkDeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pCreateInfo = pCreateInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & + setPSubresource( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource_ ) VULKAN_HPP_NOEXCEPT + { + pSubresource = pSubresource_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDeviceImageSubresourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceImageSubresourceInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pCreateInfo, pSubresource ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceImageSubresourceInfoKHR const & ) const = default; +#else + bool operator==( DeviceImageSubresourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( pSubresource == rhs.pSubresource ); +# endif + } + + bool operator!=( DeviceImageSubresourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageSubresourceInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; + const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource = {}; + }; + + template <> + struct CppType + { + using Type = DeviceImageSubresourceInfoKHR; + }; + struct DeviceMemoryOpaqueCaptureAddressInfo { using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo; @@ -28733,6 +29037,51 @@ namespace VULKAN_HPP_NAMESPACE using Type = DeviceMemoryReportCallbackDataEXT; }; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + union DeviceOrHostAddressConstAMDX + { + using NativeType = VkDeviceOrHostAddressConstAMDX; +# if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} +# endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +# if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 
DeviceOrHostAddressConstAMDX & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT + { + deviceAddress = deviceAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setHostAddress( const void * hostAddress_ ) VULKAN_HPP_NOEXCEPT + { + hostAddress = hostAddress_; + return *this; + } +# endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkDeviceOrHostAddressConstAMDX const &() const + { + return *reinterpret_cast( this ); + } + + operator VkDeviceOrHostAddressConstAMDX &() + { + return *reinterpret_cast( this ); + } + +# ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress; + const void * hostAddress; +# else + VkDeviceAddress deviceAddress; + const void * hostAddress; +# endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct DevicePrivateDataCreateInfo { using NativeType = VkDevicePrivateDataCreateInfo; @@ -29410,6 +29759,175 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct DispatchGraphCountInfoAMDX + { + using NativeType = VkDispatchGraphCountInfoAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( uint32_t count_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos_ = {}, + uint64_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : count( count_ ) + , infos( infos_ ) + , stride( stride_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchGraphCountInfoAMDX( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchGraphCountInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + + DispatchGraphCountInfoAMDX & operator=( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DispatchGraphCountInfoAMDX & operator=( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setCount( uint32_t count_ ) VULKAN_HPP_NOEXCEPT + { + count = count_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setInfos( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & infos_ ) VULKAN_HPP_NOEXCEPT + { + infos = infos_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setStride( uint64_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDispatchGraphCountInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDispatchGraphCountInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( count, infos, stride ); + } +# endif + + public: + uint32_t count = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos = {}; + uint64_t stride = {}; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct DispatchGraphInfoAMDX + { + using NativeType = VkDispatchGraphInfoAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 
DispatchGraphInfoAMDX( uint32_t nodeIndex_ = {}, + uint32_t payloadCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads_ = {}, + uint64_t payloadStride_ = {} ) VULKAN_HPP_NOEXCEPT + : nodeIndex( nodeIndex_ ) + , payloadCount( payloadCount_ ) + , payloads( payloads_ ) + , payloadStride( payloadStride_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DispatchGraphInfoAMDX( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : DispatchGraphInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + + DispatchGraphInfoAMDX & operator=( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DispatchGraphInfoAMDX & operator=( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setNodeIndex( uint32_t nodeIndex_ ) VULKAN_HPP_NOEXCEPT + { + nodeIndex = nodeIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadCount( uint32_t payloadCount_ ) VULKAN_HPP_NOEXCEPT + { + payloadCount = payloadCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloads( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX const & payloads_ ) VULKAN_HPP_NOEXCEPT + { + payloads = payloads_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadStride( uint64_t payloadStride_ ) VULKAN_HPP_NOEXCEPT + { + payloadStride = payloadStride_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDispatchGraphInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDispatchGraphInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( nodeIndex, payloadCount, payloads, payloadStride ); + } +# endif + + public: + uint32_t nodeIndex = {}; + uint32_t payloadCount = {}; + VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads = {}; + uint64_t payloadStride = {}; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct DispatchIndirectCommand { using NativeType = VkDispatchIndirectCommand; @@ -31909,6 +32427,425 @@ namespace VULKAN_HPP_NAMESPACE using Type = EventCreateInfo; }; + struct PipelineLibraryCreateInfoKHR + { + using NativeType = VkPipelineLibraryCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , libraryCount( libraryCount_ ) + , pLibraries( pLibraries_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = libraryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT + { + pLibraries = pLibraries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineLibraryCreateInfoKHR & + setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) VULKAN_HPP_NOEXCEPT + { + libraryCount = static_cast( libraries_.size() ); + pLibraries = libraries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, libraryCount, pLibraries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries ); +# endif + } + + bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; + const void * pNext = {}; + uint32_t libraryCount = {}; + const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {}; + }; + + template <> + struct CppType + { + using Type = PipelineLibraryCreateInfoKHR; + }; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct ExecutionGraphPipelineCreateInfoAMDX + { + using NativeType = VkExecutionGraphPipelineCreateInfoAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineCreateInfoAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, + uint32_t stageCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ = {}, + const 
VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( stageCount_ ) + , pStages( pStages_ ) + , pLibraryInfo( pLibraryInfo_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExecutionGraphPipelineCreateInfoAMDX( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : ExecutionGraphPipelineCreateInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ExecutionGraphPipelineCreateInfoAMDX( + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_, + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, + int32_t basePipelineIndex_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , stageCount( static_cast( stages_.size() ) ) + , pStages( stages_.data() ) + , pLibraryInfo( pLibraryInfo_ ) + , layout( layout_ ) + , basePipelineHandle( basePipelineHandle_ ) + , basePipelineIndex( basePipelineIndex_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + ExecutionGraphPipelineCreateInfoAMDX & operator=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExecutionGraphPipelineCreateInfoAMDX & operator=( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = stageCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & + setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages_ ) VULKAN_HPP_NOEXCEPT + { + pStages = pStages_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + ExecutionGraphPipelineCreateInfoAMDX & + setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT + { + stageCount = static_cast( stages_.size() ); + pStages = stages_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & + setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT + { + pLibraryInfo = pLibraryInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + { + layout = layout_; 
+ return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & + setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineHandle = basePipelineHandle_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT + { + basePipelineIndex = basePipelineIndex_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExecutionGraphPipelineCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExecutionGraphPipelineCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, stageCount, pStages, pLibraryInfo, layout, basePipelineHandle, basePipelineIndex ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExecutionGraphPipelineCreateInfoAMDX const & ) const = default; +# else + bool operator==( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && + ( pLibraryInfo == rhs.pLibraryInfo ) && ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && + ( basePipelineIndex == rhs.basePipelineIndex ); +# endif + } + + bool operator!=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineCreateInfoAMDX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {}; + uint32_t stageCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo * pStages = {}; + const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; + VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {}; + int32_t basePipelineIndex = {}; + }; + + template <> + struct CppType + { + using Type = ExecutionGraphPipelineCreateInfoAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct ExecutionGraphPipelineScratchSizeAMDX + { + using NativeType = VkExecutionGraphPipelineScratchSizeAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , size( size_ ) + { + } + + VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ExecutionGraphPipelineScratchSizeAMDX( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : ExecutionGraphPipelineScratchSizeAMDX( *reinterpret_cast( &rhs ) ) + { + } + + ExecutionGraphPipelineScratchSizeAMDX & operator=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ExecutionGraphPipelineScratchSizeAMDX & operator=( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkExecutionGraphPipelineScratchSizeAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExecutionGraphPipelineScratchSizeAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, size ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ExecutionGraphPipelineScratchSizeAMDX const & ) const = default; +# else + bool operator==( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ); +# endif + } + + bool operator!=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + + template <> + struct CppType + { + using Type = ExecutionGraphPipelineScratchSizeAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct ExportFenceCreateInfo { using NativeType = VkExportFenceCreateInfo; @@ -39119,7 +40056,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) , flags( flags_ ) { @@ -39142,7 +40079,7 @@ namespace VULKAN_HPP_NAMESPACE } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -39169,7 +40106,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -39197,7 +40134,7 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; - void * pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags = {}; }; @@ -42719,102 +43656,6 @@ namespace VULKAN_HPP_NAMESPACE }; using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; - struct ImageSubresource2EXT - { - using NativeType = VkImageSubresource2EXT; - - static const bool 
allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2EXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageSubresource2EXT( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageSubresource( imageSubresource_ ) - { - } - - VULKAN_HPP_CONSTEXPR ImageSubresource2EXT( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - ImageSubresource2EXT( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageSubresource2EXT( *reinterpret_cast( &rhs ) ) - { - } - - ImageSubresource2EXT & operator=( ImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - ImageSubresource2EXT & operator=( VkImageSubresource2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 ImageSubresource2EXT & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT - { - imageSubresource = imageSubresource_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkImageSubresource2EXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkImageSubresource2EXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, imageSubresource ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageSubresource2EXT const & ) const = default; -#else - bool operator==( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource ); -# endif - } - - bool operator!=( ImageSubresource2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2EXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource = {}; - }; - - template <> - struct CppType - { - using Type = ImageSubresource2EXT; - }; - struct ImageSwapchainCreateInfoKHR { using NativeType = VkImageSwapchainCreateInfoKHR; @@ -66640,6 +67481,219 @@ namespace VULKAN_HPP_NAMESPACE }; using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; + struct PhysicalDeviceMaintenance5FeaturesKHR + { + using NativeType = VkPhysicalDeviceMaintenance5FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maintenance5( maintenance5_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5FeaturesKHR( 
PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance5FeaturesKHR( VkPhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance5FeaturesKHR & operator=( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance5FeaturesKHR & operator=( VkPhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5FeaturesKHR & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT + { + maintenance5 = maintenance5_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMaintenance5FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance5 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance5FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance5 == rhs.maintenance5 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance5FeaturesKHR; + }; + + struct PhysicalDeviceMaintenance5PropertiesKHR + { + using NativeType = VkPhysicalDeviceMaintenance5PropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5PropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5PropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , earlyFragmentMultisampleCoverageAfterSampleCounting( earlyFragmentMultisampleCoverageAfterSampleCounting_ ) + , earlyFragmentSampleMaskTestBeforeSampleCounting( 
earlyFragmentSampleMaskTestBeforeSampleCounting_ ) + , depthStencilSwizzleOneSupport( depthStencilSwizzleOneSupport_ ) + , polygonModePointSize( polygonModePointSize_ ) + , nonStrictSinglePixelWideLinesUseParallelogram( nonStrictSinglePixelWideLinesUseParallelogram_ ) + , nonStrictWideLinesUseParallelogram( nonStrictWideLinesUseParallelogram_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5PropertiesKHR( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance5PropertiesKHR( VkPhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5PropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance5PropertiesKHR & operator=( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance5PropertiesKHR & operator=( VkPhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance5PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance5PropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + earlyFragmentMultisampleCoverageAfterSampleCounting, + earlyFragmentSampleMaskTestBeforeSampleCounting, + depthStencilSwizzleOneSupport, + polygonModePointSize, + nonStrictSinglePixelWideLinesUseParallelogram, + nonStrictWideLinesUseParallelogram ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance5PropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) && + ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) && + ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) && + ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) && + ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5PropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {}; + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {}; + }; + + template <> + struct CppType + { + 
using Type = PhysicalDeviceMaintenance5PropertiesKHR; + }; + struct PhysicalDeviceMemoryBudgetPropertiesEXT { using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT; @@ -75719,6 +76773,260 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; }; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceShaderEnqueueFeaturesAMDX + { + using NativeType = VkPhysicalDeviceShaderEnqueueFeaturesAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , shaderEnqueue( shaderEnqueue_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEnqueueFeaturesAMDX( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEnqueueFeaturesAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderEnqueueFeaturesAMDX & operator=( VkPhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & setShaderEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ ) VULKAN_HPP_NOEXCEPT + { + shaderEnqueue = shaderEnqueue_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderEnqueue ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEnqueueFeaturesAMDX const & ) const = default; +# else + bool operator==( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEnqueueFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEnqueueFeaturesAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( 
VK_ENABLE_BETA_EXTENSIONS ) + struct PhysicalDeviceShaderEnqueuePropertiesAMDX + { + using NativeType = VkPhysicalDeviceShaderEnqueuePropertiesAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t maxExecutionGraphDepth_ = {}, + uint32_t maxExecutionGraphShaderOutputNodes_ = {}, + uint32_t maxExecutionGraphShaderPayloadSize_ = {}, + uint32_t maxExecutionGraphShaderPayloadCount_ = {}, + uint32_t executionGraphDispatchAddressAlignment_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , maxExecutionGraphDepth( maxExecutionGraphDepth_ ) + , maxExecutionGraphShaderOutputNodes( maxExecutionGraphShaderOutputNodes_ ) + , maxExecutionGraphShaderPayloadSize( maxExecutionGraphShaderPayloadSize_ ) + , maxExecutionGraphShaderPayloadCount( maxExecutionGraphShaderPayloadCount_ ) + , executionGraphDispatchAddressAlignment( executionGraphDispatchAddressAlignment_ ) + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderEnqueuePropertiesAMDX( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderEnqueuePropertiesAMDX( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderEnqueuePropertiesAMDX & operator=( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setMaxExecutionGraphDepth( uint32_t maxExecutionGraphDepth_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphDepth = maxExecutionGraphDepth_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderOutputNodes( uint32_t maxExecutionGraphShaderOutputNodes_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderOutputNodes = maxExecutionGraphShaderOutputNodes_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderPayloadSize( uint32_t maxExecutionGraphShaderPayloadSize_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderPayloadSize = maxExecutionGraphShaderPayloadSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderPayloadCount( uint32_t maxExecutionGraphShaderPayloadCount_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderPayloadCount = maxExecutionGraphShaderPayloadCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setExecutionGraphDispatchAddressAlignment( uint32_t executionGraphDispatchAddressAlignment_ ) VULKAN_HPP_NOEXCEPT + { + executionGraphDispatchAddressAlignment = executionGraphDispatchAddressAlignment_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxExecutionGraphDepth, + maxExecutionGraphShaderOutputNodes, + maxExecutionGraphShaderPayloadSize, + maxExecutionGraphShaderPayloadCount, + executionGraphDispatchAddressAlignment ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderEnqueuePropertiesAMDX const & ) const = default; +# else + bool operator==( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxExecutionGraphDepth == rhs.maxExecutionGraphDepth ) && + ( maxExecutionGraphShaderOutputNodes == rhs.maxExecutionGraphShaderOutputNodes ) && + ( maxExecutionGraphShaderPayloadSize == rhs.maxExecutionGraphShaderPayloadSize ) && + ( maxExecutionGraphShaderPayloadCount == rhs.maxExecutionGraphShaderPayloadCount ) && + ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment ); +# endif + } + + bool operator!=( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +# endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + void * pNext = {}; + uint32_t maxExecutionGraphDepth = {}; + uint32_t maxExecutionGraphShaderOutputNodes = {}; + uint32_t maxExecutionGraphShaderPayloadSize = {}; + uint32_t maxExecutionGraphShaderPayloadCount = {}; + uint32_t executionGraphDispatchAddressAlignment = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderEnqueuePropertiesAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct PhysicalDeviceShaderFloat16Int8Features { using NativeType = VkPhysicalDeviceShaderFloat16Int8Features; @@ -84244,6 +85552,103 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineCoverageToColorStateCreateInfoNV; }; + struct PipelineCreateFlags2CreateInfoKHR + { + using NativeType = VkPipelineCreateFlags2CreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateFlags2CreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , flags( flags_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfoKHR( PipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreateFlags2CreateInfoKHR( VkPipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreateFlags2CreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCreateFlags2CreateInfoKHR & operator=( PipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCreateFlags2CreateInfoKHR & operator=( VkPipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + 
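    // PipelineCreateFlags2CreateInfoKHR comes from VK_KHR_maintenance5: when chained into the
    // pNext of a pipeline create info, its 64-bit flags field is used in place of the legacy
    // 32-bit flags member. Illustrative sketch only; pipelineCreateInfo is a placeholder name,
    // not part of this header:
    //
    //   vk::PipelineCreateFlags2CreateInfoKHR flags2Info;
    //   flags2Info.setFlags( vk::PipelineCreateFlagBits2KHR::eDisableOptimization );
    //   vk::GraphicsPipelineCreateInfo pipelineCreateInfo;
    //   pipelineCreateInfo.pNext = &flags2Info;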
*this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineCreateFlags2CreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreateFlags2CreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreateFlags2CreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineCreateFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); +# endif + } + + bool operator!=( PipelineCreateFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateFlags2CreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCreateFlags2CreateInfoKHR; + }; + struct PipelineCreationFeedback { using NativeType = VkPipelineCreationFeedback; @@ -85801,130 +87206,6 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineLayoutCreateInfo; }; - struct PipelineLibraryCreateInfoKHR - { - using NativeType = VkPipelineLibraryCreateInfoKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, - const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , libraryCount( libraryCount_ ) - , pLibraries( pLibraries_ ) - { - } - - VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return 
*this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT - { - libraryCount = libraryCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ ) VULKAN_HPP_NOEXCEPT - { - pLibraries = pLibraries_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineLibraryCreateInfoKHR & - setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & libraries_ ) VULKAN_HPP_NOEXCEPT - { - libraryCount = static_cast( libraries_.size() ); - pLibraries = libraries_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, libraryCount, pLibraries ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; -#else - bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries ); -# endif - } - - bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR; - const void * pNext = {}; - uint32_t libraryCount = {}; - const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries = {}; - }; - - template <> - struct CppType - { - using Type = PipelineLibraryCreateInfoKHR; - }; - struct PipelinePropertiesIdentifierEXT { using NativeType = VkPipelinePropertiesIdentifierEXT; @@ -87349,6 +88630,122 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineShaderStageModuleIdentifierCreateInfoEXT; }; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + struct PipelineShaderStageNodeCreateInfoAMDX + { + using NativeType = VkPipelineShaderStageNodeCreateInfoAMDX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX; + +# if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PipelineShaderStageNodeCreateInfoAMDX( const char * pName_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , pName( pName_ ) + , index( index_ ) + { + } + + VULKAN_HPP_CONSTEXPR PipelineShaderStageNodeCreateInfoAMDX( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineShaderStageNodeCreateInfoAMDX( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineShaderStageNodeCreateInfoAMDX( *reinterpret_cast( &rhs ) ) + { + } + + 
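    // PipelineShaderStageNodeCreateInfoAMDX is part of the provisional VK_AMDX_shader_enqueue
    // extension and names a single node of an execution graph pipeline by shader entry point
    // and index. Illustrative sketch using the setters below; "main" and 0 are placeholder
    // values:
    //
    //   vk::PipelineShaderStageNodeCreateInfoAMDX nodeInfo;
    //   nodeInfo.setPName( "main" ).setIndex( 0 );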
PipelineShaderStageNodeCreateInfoAMDX & operator=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineShaderStageNodeCreateInfoAMDX & operator=( VkPipelineShaderStageNodeCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setPName( const char * pName_ ) VULKAN_HPP_NOEXCEPT + { + pName = pName_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageNodeCreateInfoAMDX & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineShaderStageNodeCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageNodeCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pName, index ); + } +# endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( pName != rhs.pName ) + if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = index <=> rhs.index; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +# endif + + bool operator==( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( index == rhs.index ); + } + + bool operator!=( PipelineShaderStageNodeCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageNodeCreateInfoAMDX; + const void * pNext = {}; + const char * pName = {}; + uint32_t index = {}; + }; + + template <> + struct CppType + { + using Type = PipelineShaderStageNodeCreateInfoAMDX; + }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct PipelineShaderStageRequiredSubgroupSizeCreateInfo { using NativeType = VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; @@ -95155,6 +96552,174 @@ namespace VULKAN_HPP_NAMESPACE using Type = RenderPassTransformBeginInfoQCOM; }; + struct RenderingAreaInfoKHR + { + using NativeType = VkRenderingAreaInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAreaInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR RenderingAreaInfoKHR( uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachmentFormats( pColorAttachmentFormats_ ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + { + } + + VULKAN_HPP_CONSTEXPR RenderingAreaInfoKHR( RenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + RenderingAreaInfoKHR( VkRenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingAreaInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingAreaInfoKHR( uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , viewMask( viewMask_ ) + , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) + , pColorAttachmentFormats( colorAttachmentFormats_.data() ) + , depthAttachmentFormat( depthAttachmentFormat_ ) + , stencilAttachmentFormat( stencilAttachmentFormat_ ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + RenderingAreaInfoKHR & operator=( RenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + RenderingAreaInfoKHR & operator=( VkRenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & 
setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & + setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachmentFormats = pColorAttachmentFormats_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + RenderingAreaInfoKHR & setColorAttachmentFormats( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + { + colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); + pColorAttachmentFormats = colorAttachmentFormats_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + depthAttachmentFormat = depthAttachmentFormat_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + { + stencilAttachmentFormat = stencilAttachmentFormat_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkRenderingAreaInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderingAreaInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( RenderingAreaInfoKHR const & ) const = default; +#else + bool operator==( RenderingAreaInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && + ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && + ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); +# endif + } + + bool operator!=( RenderingAreaInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAreaInfoKHR; + const void * pNext = {}; + uint32_t viewMask = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats = {}; + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined; + }; + + template <> + struct CppType + { + using Type = RenderingAreaInfoKHR; + }; + struct RenderingAttachmentInfo { using NativeType = VkRenderingAttachmentInfo; @@ -100877,44 +102442,44 @@ 
namespace VULKAN_HPP_NAMESPACE using Type = SubresourceHostMemcpySizeEXT; }; - struct SubresourceLayout2EXT + struct SubresourceLayout2KHR { - using NativeType = VkSubresourceLayout2EXT; + using NativeType = VkSubresourceLayout2KHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2EXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2KHR; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) , subresourceLayout( subresourceLayout_ ) { } - VULKAN_HPP_CONSTEXPR SubresourceLayout2EXT( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubresourceLayout2KHR( SubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubresourceLayout2EXT( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SubresourceLayout2EXT( *reinterpret_cast( &rhs ) ) + SubresourceLayout2KHR( VkSubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + : SubresourceLayout2KHR( *reinterpret_cast( &rhs ) ) { } - SubresourceLayout2EXT & operator=( SubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubresourceLayout2KHR & operator=( SubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubresourceLayout2EXT & operator=( VkSubresourceLayout2EXT const & rhs ) VULKAN_HPP_NOEXCEPT + SubresourceLayout2KHR & operator=( VkSubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSubresourceLayout2EXT const &() const VULKAN_HPP_NOEXCEPT + operator VkSubresourceLayout2KHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubresourceLayout2EXT &() VULKAN_HPP_NOEXCEPT + operator VkSubresourceLayout2KHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -100930,9 +102495,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubresourceLayout2EXT const & ) const = default; + auto operator<=>( SubresourceLayout2KHR const & ) const = default; #else - bool operator==( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubresourceLayout2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -100941,23 +102506,24 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( SubresourceLayout2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubresourceLayout2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2EXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2KHR; void * pNext = {}; VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {}; }; template <> - struct CppType + struct CppType { - using Type = SubresourceLayout2EXT; + using Type = 
SubresourceLayout2KHR; }; + using SubresourceLayout2EXT = SubresourceLayout2KHR; struct SurfaceCapabilities2EXT { diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp index 7e3a8f5..d04d891 100644 --- a/include/vulkan/vulkan_to_string.hpp +++ b/include/vulkan/vulkan_to_string.hpp @@ -600,6 +600,10 @@ namespace VULKAN_HPP_NAMESPACE result += "TransformFeedbackCounterBufferEXT | "; if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits::eExecutionGraphScratchAMDX ) + result += "ExecutionGraphScratchAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) result += "AccelerationStructureBuildInputReadOnlyKHR | "; if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR ) @@ -3303,6 +3307,148 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + //=== VK_KHR_maintenance5 === + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags2KHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCreateFlagBits2KHR::eDisableOptimization ) + result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits2KHR::eAllowDerivatives ) + result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits2KHR::eDerivative ) + result += "Derivative | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineCreateFlagBits2KHR::eReserved28NV ) + result += "Reserved28NV | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex ) + result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits2KHR::eDispatchBase ) + result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits2KHR::eDeferCompile ) + result += "DeferCompile | "; + if ( value & PipelineCreateFlagBits2KHR::eCaptureStatistics ) + result += "CaptureStatistics | "; + if ( value & PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations ) + result += "CaptureInternalRepresentations | "; + if ( value & PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired ) + result += "FailOnPipelineCompileRequired | "; + if ( value & PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure ) + result += "EarlyReturnOnFailure | "; + if ( value & PipelineCreateFlagBits2KHR::eLinkTimeOptimization ) + result += "LinkTimeOptimization | "; + if ( value & PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo ) + result += "RetainLinkTimeOptimizationInfo | "; + if ( value & PipelineCreateFlagBits2KHR::eLibrary ) + result += "Library | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles ) + result += "RayTracingSkipTriangles | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs ) + result += "RayTracingSkipAabbs | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders ) + result += "RayTracingNoNullAnyHitShaders | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders ) + result += "RayTracingNoNullClosestHitShaders | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders ) + result += "RayTracingNoNullMissShaders | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders ) + result += "RayTracingNoNullIntersectionShaders | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay ) + result += 
"RayTracingShaderGroupHandleCaptureReplay | "; + if ( value & PipelineCreateFlagBits2KHR::eIndirectBindable ) + result += "IndirectBindable | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingAllowMotion ) + result += "RayTracingAllowMotion | "; + if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment ) + result += "RenderingFragmentShadingRateAttachment | "; + if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment ) + result += "RenderingFragmentDensityMapAttachment | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap ) + result += "RayTracingOpacityMicromap | "; + if ( value & PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop ) + result += "ColorAttachmentFeedbackLoop | "; + if ( value & PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop ) + result += "DepthStencilAttachmentFeedbackLoop | "; + if ( value & PipelineCreateFlagBits2KHR::eNoProtectedAccess ) + result += "NoProtectedAccess | "; + if ( value & PipelineCreateFlagBits2KHR::eProtectedAccessOnly ) + result += "ProtectedAccessOnly | "; + if ( value & PipelineCreateFlagBits2KHR::eDescriptorBuffer ) + result += "DescriptorBuffer | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags2KHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BufferUsageFlagBits2KHR::eTransferSrc ) + result += "TransferSrc | "; + if ( value & BufferUsageFlagBits2KHR::eTransferDst ) + result += "TransferDst | "; + if ( value & BufferUsageFlagBits2KHR::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eUniformBuffer ) + result += "UniformBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eStorageBuffer ) + result += "StorageBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eIndexBuffer ) + result += "IndexBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eIndirectBuffer ) + result += "IndirectBuffer | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX ) + result += "ExecutionGraphScratchAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits2KHR::eConditionalRendering ) + result += "ConditionalRendering | "; + if ( value & BufferUsageFlagBits2KHR::eShaderBindingTable ) + result += "ShaderBindingTable | "; + if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackBuffer ) + result += "TransformFeedbackBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer ) + result += "TransformFeedbackCounterBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eVideoDecodeSrc ) + result += "VideoDecodeSrc | "; + if ( value & BufferUsageFlagBits2KHR::eVideoDecodeDst ) + result += "VideoDecodeDst | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits2KHR::eVideoEncodeDst ) + result += "VideoEncodeDst | "; + if ( value & BufferUsageFlagBits2KHR::eVideoEncodeSrc ) + result += "VideoEncodeSrc | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits2KHR::eShaderDeviceAddress ) + result += "ShaderDeviceAddress | "; + if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly ) + result += "AccelerationStructureBuildInputReadOnly | "; + if ( value & 
BufferUsageFlagBits2KHR::eAccelerationStructureStorage ) + result += "AccelerationStructureStorage | "; + if ( value & BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer ) + result += "SamplerDescriptorBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eResourceDescriptorBuffer ) + result += "ResourceDescriptorBuffer | "; + if ( value & BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer ) + result += "PushDescriptorsDescriptorBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly ) + result += "MicromapBuildInputReadOnly | "; + if ( value & BufferUsageFlagBits2KHR::eMicromapStorage ) + result += "MicromapStorage | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + //=== VK_EXT_shader_object === VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value ) @@ -3830,6 +3976,13 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eExternalFormatANDROID: return "ExternalFormatANDROID"; case StructureType::eAndroidHardwareBufferFormatProperties2ANDROID: return "AndroidHardwareBufferFormatProperties2ANDROID"; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX: return "PhysicalDeviceShaderEnqueueFeaturesAMDX"; + case StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX: return "PhysicalDeviceShaderEnqueuePropertiesAMDX"; + case StructureType::eExecutionGraphPipelineScratchSizeAMDX: return "ExecutionGraphPipelineScratchSizeAMDX"; + case StructureType::eExecutionGraphPipelineCreateInfoAMDX: return "ExecutionGraphPipelineCreateInfoAMDX"; + case StructureType::ePipelineShaderStageNodeCreateInfoAMDX: return "PipelineShaderStageNodeCreateInfoAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT"; @@ -4109,8 +4262,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR"; case StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT: return "PhysicalDeviceImageCompressionControlFeaturesEXT"; case StructureType::eImageCompressionControlEXT: return "ImageCompressionControlEXT"; - case StructureType::eSubresourceLayout2EXT: return "SubresourceLayout2EXT"; - case StructureType::eImageSubresource2EXT: return "ImageSubresource2EXT"; case StructureType::eImageCompressionPropertiesEXT: return "ImageCompressionPropertiesEXT"; case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT"; case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT"; @@ -4239,6 +4390,14 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV"; case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT"; case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT"; + case StructureType::ePhysicalDeviceMaintenance5FeaturesKHR: return "PhysicalDeviceMaintenance5FeaturesKHR"; + case StructureType::ePhysicalDeviceMaintenance5PropertiesKHR: 
return "PhysicalDeviceMaintenance5PropertiesKHR"; + case StructureType::eRenderingAreaInfoKHR: return "RenderingAreaInfoKHR"; + case StructureType::eDeviceImageSubresourceInfoKHR: return "DeviceImageSubresourceInfoKHR"; + case StructureType::eSubresourceLayout2KHR: return "SubresourceLayout2KHR"; + case StructureType::eImageSubresource2KHR: return "ImageSubresource2KHR"; + case StructureType::ePipelineCreateFlags2CreateInfoKHR: return "PipelineCreateFlags2CreateInfoKHR"; + case StructureType::eBufferUsageFlags2CreateInfoKHR: return "BufferUsageFlags2CreateInfoKHR"; case StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR: return "PhysicalDeviceRayTracingPositionFetchFeaturesKHR"; case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT"; case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT"; @@ -4608,6 +4767,8 @@ namespace VULKAN_HPP_NAMESPACE case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; case Format::eR16G16S105NV: return "R16G16S105NV"; + case Format::eA1B5G5R5UnormPack16KHR: return "A1B5G5R5UnormPack16KHR"; + case Format::eA8UnormKHR: return "A8UnormKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5046,6 +5207,9 @@ namespace VULKAN_HPP_NAMESPACE case BufferUsageFlagBits::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; case BufferUsageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR"; case BufferUsageFlagBits::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR"; case BufferUsageFlagBits::eShaderBindingTableKHR: return "ShaderBindingTableKHR"; @@ -5784,6 +5948,9 @@ namespace VULKAN_HPP_NAMESPACE { case PipelineBindPoint::eGraphics: return "Graphics"; case PipelineBindPoint::eCompute: return "Compute"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineBindPoint::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case PipelineBindPoint::eRayTracingKHR: return "RayTracingKHR"; case PipelineBindPoint::eSubpassShadingHUAWEI: return "SubpassShadingHUAWEI"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; @@ -8516,6 +8683,87 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_KHR_maintenance5 === + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits2KHR value ) + { + switch ( value ) + { + case PipelineCreateFlagBits2KHR::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits2KHR::eAllowDerivatives: return "AllowDerivatives"; + case PipelineCreateFlagBits2KHR::eDerivative: return "Derivative"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineCreateFlagBits2KHR::eReserved28NV: return "Reserved28NV"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits2KHR::eDispatchBase: return "DispatchBase"; + case 
PipelineCreateFlagBits2KHR::eDeferCompile: return "DeferCompile"; + case PipelineCreateFlagBits2KHR::eCaptureStatistics: return "CaptureStatistics"; + case PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations: return "CaptureInternalRepresentations"; + case PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; + case PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; + case PipelineCreateFlagBits2KHR::eLinkTimeOptimization: return "LinkTimeOptimization"; + case PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo: return "RetainLinkTimeOptimizationInfo"; + case PipelineCreateFlagBits2KHR::eLibrary: return "Library"; + case PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles: return "RayTracingSkipTriangles"; + case PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs: return "RayTracingSkipAabbs"; + case PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders: return "RayTracingNoNullAnyHitShaders"; + case PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders: return "RayTracingNoNullClosestHitShaders"; + case PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders: return "RayTracingNoNullMissShaders"; + case PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders: return "RayTracingNoNullIntersectionShaders"; + case PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay: return "RayTracingShaderGroupHandleCaptureReplay"; + case PipelineCreateFlagBits2KHR::eIndirectBindable: return "IndirectBindable"; + case PipelineCreateFlagBits2KHR::eRayTracingAllowMotion: return "RayTracingAllowMotion"; + case PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment: return "RenderingFragmentShadingRateAttachment"; + case PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment: return "RenderingFragmentDensityMapAttachment"; + case PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap: return "RayTracingOpacityMicromap"; + case PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop: return "ColorAttachmentFeedbackLoop"; + case PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop: return "DepthStencilAttachmentFeedbackLoop"; + case PipelineCreateFlagBits2KHR::eNoProtectedAccess: return "NoProtectedAccess"; + case PipelineCreateFlagBits2KHR::eProtectedAccessOnly: return "ProtectedAccessOnly"; + case PipelineCreateFlagBits2KHR::eDescriptorBuffer: return "DescriptorBuffer"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits2KHR value ) + { + switch ( value ) + { + case BufferUsageFlagBits2KHR::eTransferSrc: return "TransferSrc"; + case BufferUsageFlagBits2KHR::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits2KHR::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits2KHR::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits2KHR::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits2KHR::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits2KHR::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits2KHR::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits2KHR::eIndirectBuffer: return "IndirectBuffer"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case 
BufferUsageFlagBits2KHR::eConditionalRendering: return "ConditionalRendering"; + case BufferUsageFlagBits2KHR::eShaderBindingTable: return "ShaderBindingTable"; + case BufferUsageFlagBits2KHR::eTransformFeedbackBuffer: return "TransformFeedbackBuffer"; + case BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer: return "TransformFeedbackCounterBuffer"; + case BufferUsageFlagBits2KHR::eVideoDecodeSrc: return "VideoDecodeSrc"; + case BufferUsageFlagBits2KHR::eVideoDecodeDst: return "VideoDecodeDst"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits2KHR::eVideoEncodeDst: return "VideoEncodeDst"; + case BufferUsageFlagBits2KHR::eVideoEncodeSrc: return "VideoEncodeSrc"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits2KHR::eShaderDeviceAddress: return "ShaderDeviceAddress"; + case BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly: return "AccelerationStructureBuildInputReadOnly"; + case BufferUsageFlagBits2KHR::eAccelerationStructureStorage: return "AccelerationStructureStorage"; + case BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer: return "SamplerDescriptorBuffer"; + case BufferUsageFlagBits2KHR::eResourceDescriptorBuffer: return "ResourceDescriptorBuffer"; + case BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer: return "PushDescriptorsDescriptorBuffer"; + case BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly: return "MicromapBuildInputReadOnly"; + case BufferUsageFlagBits2KHR::eMicromapStorage: return "MicromapStorage"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_EXT_shader_object === VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagBitsEXT value ) diff --git a/registry/genvk.py b/registry/genvk.py index 9cfabd4..71e6baf 100755 --- a/registry/genvk.py +++ b/registry/genvk.py @@ -417,11 +417,13 @@ def makeGenOpts(args): 'VK_EXT_video_encode_h264', 'VK_EXT_video_encode_h265', 'VK_NV_displacement_micromap', + 'VK_AMDX_shader_enqueue', ] betaSuppressExtensions = [ 'VK_KHR_video_queue', 'VK_EXT_opacity_micromap', + 'VK_KHR_pipeline_library', ] platforms = [ diff --git a/registry/parse_dependency.py b/registry/parse_dependency.py index 63eeabc..313b3c0 100755 --- a/registry/parse_dependency.py +++ b/registry/parse_dependency.py @@ -52,32 +52,79 @@ import operator import pyparsing as pp import re -def nameMarkup(name): - """Returns asciidoc markup to generate a link to an API version or - extension anchor. +def markupPassthrough(name): + """Pass a name (leaf or operator) through without applying markup""" + return name + +# A regexp matching Vulkan and VulkanSC core version names +# The Conventions is_api_version_name() method is similar, but does not +# return the matches. +apiVersionNamePat = re.compile(r'(VK|VKSC)_VERSION_([0-9]+)_([0-9]+)') + +def apiVersionNameMatch(name): + """Return [ apivariant, major, minor ] if name is an API version name, + or [ None, None, None ] if it is not.""" + + match = apiVersionNamePat.match(name) + if match is not None: + return [ match.group(1), match.group(2), match.group(3) ] + else: + return [ None, None, None ] + +def leafMarkupAsciidoc(name): + """Markup a leaf name as an asciidoc link to an API version or extension + anchor. - name - version or extension name""" - # Could use ApiConventions.is_api_version_name, but that does not split - # out the major/minor version numbers. 
- match = re.search("[A-Z]+_VERSION_([0-9]+)_([0-9]+)", name) - if match is not None: - major = match.group(1) - minor = match.group(2) - version = major + '.' + minor + (apivariant, major, minor) = apiVersionNameMatch(name) - # Vulkan SC has a different anchor pattern for version appendices - scMatch = re.search("[A-Z]+SC_VERSION_([0-9]+)_([0-9]+)", name) - if scMatch is not None: + if apivariant is not None: + version = major + '.' + minor + if apivariant == 'VKSC': + # Vulkan SC has a different anchor pattern for version appendices if version == '1.0': return 'Vulkan SC 1.0' else: - return f'<>' + return f'<>' else: - return f'<>' + return f'<>' else: - return 'apiext:' + name + return f'apiext:{name}' +def leafMarkupC(name): + """Markup a leaf name as a C expression, using conventions of the + Vulkan Validation Layers + + - name - version or extension name""" + + (apivariant, major, minor) = apiVersionNameMatch(name) + + if apivariant is not None: + return name + else: + return f'ext.{name}' + +opMarkupAsciidocMap = { '+' : 'and', ',' : 'or' } + +def opMarkupAsciidoc(op): + """Markup a operator as an asciidoc spec markup equivalent + + - op - operator ('+' or ',')""" + + return opMarkupAsciidocMap[op] + +opMarkupCMap = { '+' : '&&', ',' : '||' } + +def opMarkupC(op): + """Markup a operator as an C language equivalent + + - op - operator ('+' or ',')""" + + return opMarkupCMap[op] + + +# Unfortunately global to be used in pyparsing exprStack = [] def push_first(toks): @@ -130,12 +177,6 @@ _opn = { ',': operator.or_, } -# map operator symbols to corresponding words -_opname = { - '+': 'and', - ',': 'or', -} - def evaluateStack(stack, isSupported): """Evaluate an expression stack, returning a boolean result. @@ -170,42 +211,66 @@ def evaluateDependency(dependency, isSupported): val = evaluateStack(exprStack[:], isSupported) return val -def evalDependencyLanguage(stack, specmacros): +def evalDependencyLanguage(stack, leafMarkup, opMarkup, parenthesize, root): """Evaluate an expression stack, returning an English equivalent - stack - the stack - - specmacros - if True, prepare the language for spec inclusion""" + - leafMarkup, opMarkup, parenthesize - same as dependencyLanguage + - root - True only if this is the outer (root) expression level""" op, num_args = stack.pop(), 0 if isinstance(op, tuple): op, num_args = op if op in '+,': # Could parenthesize, not needed yet - rhs = evalDependencyLanguage(stack, specmacros) - return evalDependencyLanguage(stack, specmacros) + f' {_opname[op]} ' + rhs + rhs = evalDependencyLanguage(stack, leafMarkup, opMarkup, parenthesize, root = False) + opname = opMarkup(op) + lhs = evalDependencyLanguage(stack, leafMarkup, opMarkup, parenthesize, root = False) + if parenthesize and not root: + return f'({lhs} {opname} {rhs})' + else: + return f'{lhs} {opname} {rhs}' elif op[0].isalpha(): # This is an extension or feature name - if specmacros: - return nameMarkup(op) - else: - return op + return leafMarkup(op) else: raise Exception(f'invalid op: {op}') -def dependencyLanguage(dependency, specmacros = False): +def dependencyLanguage(dependency, leafMarkup, opMarkup, parenthesize): """Return an API dependency expression translated to a form suitable for asciidoctor conditionals or header file comments. - dependency - the expression - - specmacros - if False, return a string that can be used as an - asciidoctor conditional. 
- If True, return a string suitable for spec inclusion with macros and - xrefs included.""" + - leafMarkup - function taking an extension / version name and + returning an equivalent marked up version + - opMarkup - function taking an operator ('+' / ',') name name and + returning an equivalent marked up version + - parenthesize - True if parentheses should be used in the resulting + expression, False otherwise""" global exprStack exprStack = [] results = dependencyBNF().parseString(dependency, parseAll=True) - return evalDependencyLanguage(exprStack, specmacros) + return evalDependencyLanguage(exprStack, leafMarkup, opMarkup, parenthesize, root = True) + +# aka specmacros = False +def dependencyLanguageComment(dependency): + """Return dependency expression translated to a form suitable for + comments in headers of emitted C code, as used by the + docgenerator.""" + return dependencyLanguage(dependency, leafMarkup = markupPassthrough, opMarkup = opMarkupAsciidoc, parenthesize = True) + +# aka specmacros = True +def dependencyLanguageSpecMacros(dependency): + """Return dependency expression translated to a form suitable for + comments in headers of emitted C code, as used by the + interfacegenerator.""" + return dependencyLanguage(dependency, leafMarkup = leafMarkupAsciidoc, opMarkup = opMarkupAsciidoc, parenthesize = False) + +def dependencyLanguageC(dependency): + """Return dependency expression translated to a form suitable for + use in C expressions""" + return dependencyLanguage(dependency, leafMarkup = leafMarkupC, opMarkup = opMarkupC, parenthesize = True) def evalDependencyNames(stack): """Evaluate an expression stack, returning the set of extension and @@ -262,9 +327,9 @@ def markupTraverse(expr, level = 0, root = True): str = str + markupTraverse(elem, level = nextlevel, root = False) elif elem in ('+', ','): - str = str + f'{prefix}{_opname[elem]} +\n' + str = str + f'{prefix}{opMarkupAsciidoc(elem)} +\n' else: - str = str + f'{prefix}{nameMarkup(elem)} +\n' + str = str + f'{prefix}{leafMarkupAsciidoc(elem)} +\n' return str @@ -297,7 +362,8 @@ if __name__ == "__main__": print(dependency, f'failed eval: {dependency}') if val == expected: - print(f'{dependency} = {val} (as expected)') + True + # print(f'{dependency} = {val} (as expected)') else: print(f'{dependency} ERROR: {val} != {expected}') @@ -340,24 +406,19 @@ if __name__ == "__main__": test('true+(true,false)', True) test('true+(true,true)', True) - - #test('VK_VERSION_1_1+(false,true)', True) - #test('true', True) - #test('(true)', True) - #test('false,false', False) - #test('false,true', True) - #test('false+true', False) - #test('true+true', True) - # Check formatting for dependency in [ #'true', #'true+true+false', + 'true+false', 'true+(true+false),(false,true)', - 'true+((true+false),(false,true))', + #'true+((true+false),(false,true))', + 'VK_VERSION_1_0+VK_KHR_display', #'VK_VERSION_1_1+(true,false)', ]: print(f'expr = {dependency}\n{dependencyMarkup(dependency)}') - print(f' language = {dependencyLanguage(dependency)}') + print(f' spec language = {dependencyLanguageSpecMacros(dependency)}') + print(f' comment language = {dependencyLanguageComment(dependency)}') + print(f' C language = {dependencyLanguageC(dependency)}') print(f' names = {dependencyNames(dependency)}') print(f' value = {evaluateDependency(dependency, termSupported)}') diff --git a/registry/validusage.json b/registry/validusage.json index 90e0d67..d44738f 100644 --- a/registry/validusage.json +++ b/registry/validusage.json @@ -1,9 +1,9 @@ { "version 
info": { "schema version": 2, - "api version": "1.3.259", - "comment": "from git branch: github-main commit: 3da7531f2f9d48993ab627c02a866479d5163ba4", - "date": "2023-07-22 10:52:33Z" + "api version": "1.3.260", + "comment": "from git branch: github-main commit: 12ab5855b1608e4b05b270e0dedecd1b1a5458f8", + "date": "2023-07-28 10:25:36Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -278,7 +278,7 @@ }, { "vuid": "VUID-VkPhysicalDeviceProperties2-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceAccelerationStructurePropertiesKHR, VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesKHR, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceCopyMemoryIndirectPropertiesNV, VkPhysicalDeviceCustomBorderColorPropertiesEXT, VkPhysicalDeviceDepthStencilResolveProperties, VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, VkPhysicalDeviceDescriptorBufferPropertiesEXT, VkPhysicalDeviceDescriptorIndexingProperties, VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDisplacementMicromapPropertiesNV, VkPhysicalDeviceDriverProperties, VkPhysicalDeviceDrmPropertiesEXT, VkPhysicalDeviceExtendedDynamicState3PropertiesEXT, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsProperties, VkPhysicalDeviceFragmentDensityMap2PropertiesEXT, VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR, VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV, VkPhysicalDeviceFragmentShadingRatePropertiesKHR, VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, VkPhysicalDeviceHostImageCopyPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceImageProcessingPropertiesQCOM, VkPhysicalDeviceInlineUniformBlockProperties, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMaintenance4Properties, VkPhysicalDeviceMemoryDecompressionPropertiesNV, VkPhysicalDeviceMeshShaderPropertiesEXT, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiDrawPropertiesEXT, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDeviceOpacityMicromapPropertiesEXT, VkPhysicalDeviceOpticalFlowPropertiesNV, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePerformanceQueryPropertiesKHR, VkPhysicalDevicePipelineRobustnessPropertiesEXT, VkPhysicalDevicePointClippingProperties, VkPhysicalDevicePortabilitySubsetPropertiesKHR, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDeviceProvokingVertexPropertiesEXT, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV, VkPhysicalDeviceRayTracingPipelinePropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceRobustness2PropertiesEXT, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxProperties, VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShaderCorePropertiesARM, VkPhysicalDeviceShaderIntegerDotProductProperties, VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT, VkPhysicalDeviceShaderObjectPropertiesEXT, 
VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShaderTileImagePropertiesEXT, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlProperties, VkPhysicalDeviceSubpassShadingPropertiesHUAWEI, VkPhysicalDeviceTexelBufferAlignmentProperties, VkPhysicalDeviceTimelineSemaphoreProperties, VkPhysicalDeviceTransformFeedbackPropertiesEXT, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, VkPhysicalDeviceVulkan11Properties, VkPhysicalDeviceVulkan12Properties, or VkPhysicalDeviceVulkan13Properties" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPhysicalDeviceAccelerationStructurePropertiesKHR, VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI, VkPhysicalDeviceConservativeRasterizationPropertiesEXT, VkPhysicalDeviceCooperativeMatrixPropertiesKHR, VkPhysicalDeviceCooperativeMatrixPropertiesNV, VkPhysicalDeviceCopyMemoryIndirectPropertiesNV, VkPhysicalDeviceCustomBorderColorPropertiesEXT, VkPhysicalDeviceDepthStencilResolveProperties, VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, VkPhysicalDeviceDescriptorBufferPropertiesEXT, VkPhysicalDeviceDescriptorIndexingProperties, VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, VkPhysicalDeviceDiscardRectanglePropertiesEXT, VkPhysicalDeviceDisplacementMicromapPropertiesNV, VkPhysicalDeviceDriverProperties, VkPhysicalDeviceDrmPropertiesEXT, VkPhysicalDeviceExtendedDynamicState3PropertiesEXT, VkPhysicalDeviceExternalMemoryHostPropertiesEXT, VkPhysicalDeviceFloatControlsProperties, VkPhysicalDeviceFragmentDensityMap2PropertiesEXT, VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM, VkPhysicalDeviceFragmentDensityMapPropertiesEXT, VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR, VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV, VkPhysicalDeviceFragmentShadingRatePropertiesKHR, VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, VkPhysicalDeviceHostImageCopyPropertiesEXT, VkPhysicalDeviceIDProperties, VkPhysicalDeviceImageProcessingPropertiesQCOM, VkPhysicalDeviceInlineUniformBlockProperties, VkPhysicalDeviceLineRasterizationPropertiesEXT, VkPhysicalDeviceMaintenance3Properties, VkPhysicalDeviceMaintenance4Properties, VkPhysicalDeviceMaintenance5PropertiesKHR, VkPhysicalDeviceMemoryDecompressionPropertiesNV, VkPhysicalDeviceMeshShaderPropertiesEXT, VkPhysicalDeviceMeshShaderPropertiesNV, VkPhysicalDeviceMultiDrawPropertiesEXT, VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, VkPhysicalDeviceMultiviewProperties, VkPhysicalDeviceOpacityMicromapPropertiesEXT, VkPhysicalDeviceOpticalFlowPropertiesNV, VkPhysicalDevicePCIBusInfoPropertiesEXT, VkPhysicalDevicePerformanceQueryPropertiesKHR, VkPhysicalDevicePipelineRobustnessPropertiesEXT, VkPhysicalDevicePointClippingProperties, VkPhysicalDevicePortabilitySubsetPropertiesKHR, VkPhysicalDeviceProtectedMemoryProperties, VkPhysicalDeviceProvokingVertexPropertiesEXT, VkPhysicalDevicePushDescriptorPropertiesKHR, VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV, VkPhysicalDeviceRayTracingPipelinePropertiesKHR, VkPhysicalDeviceRayTracingPropertiesNV, VkPhysicalDeviceRobustness2PropertiesEXT, VkPhysicalDeviceSampleLocationsPropertiesEXT, VkPhysicalDeviceSamplerFilterMinmaxProperties, VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM, VkPhysicalDeviceShaderCoreProperties2AMD, VkPhysicalDeviceShaderCorePropertiesAMD, VkPhysicalDeviceShaderCorePropertiesARM, 
VkPhysicalDeviceShaderEnqueuePropertiesAMDX, VkPhysicalDeviceShaderIntegerDotProductProperties, VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT, VkPhysicalDeviceShaderObjectPropertiesEXT, VkPhysicalDeviceShaderSMBuiltinsPropertiesNV, VkPhysicalDeviceShaderTileImagePropertiesEXT, VkPhysicalDeviceShadingRateImagePropertiesNV, VkPhysicalDeviceSubgroupProperties, VkPhysicalDeviceSubgroupSizeControlProperties, VkPhysicalDeviceSubpassShadingPropertiesHUAWEI, VkPhysicalDeviceTexelBufferAlignmentProperties, VkPhysicalDeviceTimelineSemaphoreProperties, VkPhysicalDeviceTransformFeedbackPropertiesEXT, VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT, VkPhysicalDeviceVulkan11Properties, VkPhysicalDeviceVulkan12Properties, or VkPhysicalDeviceVulkan13Properties" }, { "vuid": "VUID-VkPhysicalDeviceProperties2-sType-unique", @@ -678,7 +678,7 @@ }, { "vuid": "VUID-VkDeviceCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceDeviceMemoryReportCreateInfoEXT, VkDeviceDiagnosticsConfigCreateInfoNV, VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkDevicePrivateDataCreateInfo, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice4444FormatsFeaturesEXT, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceAccelerationStructureFeaturesKHR, VkPhysicalDeviceAddressBindingReportFeaturesEXT, VkPhysicalDeviceAmigoProfilingFeaturesSEC, VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBorderColorSwizzleFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceColorWriteEnableFeaturesEXT, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesKHR, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCopyMemoryIndirectFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceCustomBorderColorFeaturesEXT, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthBiasControlFeaturesEXT, VkPhysicalDeviceDepthClampZeroOneFeaturesEXT, VkPhysicalDeviceDepthClipControlFeaturesEXT, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorBufferFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV, VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, VkPhysicalDeviceDeviceMemoryReportFeaturesEXT, VkPhysicalDeviceDiagnosticsConfigFeaturesNV, VkPhysicalDeviceDisplacementMicromapFeaturesNV, VkPhysicalDeviceDynamicRenderingFeatures, VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceExtendedDynamicState2FeaturesEXT, VkPhysicalDeviceExtendedDynamicState3FeaturesEXT, VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, VkPhysicalDeviceExternalMemoryRDMAFeaturesNV, VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, VkPhysicalDeviceFaultFeaturesEXT, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, 
VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM, VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV, VkPhysicalDeviceFragmentShadingRateFeaturesKHR, VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR, VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, VkPhysicalDeviceHostImageCopyFeaturesEXT, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImage2DViewOf3DFeaturesEXT, VkPhysicalDeviceImageCompressionControlFeaturesEXT, VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, VkPhysicalDeviceImageProcessingFeaturesQCOM, VkPhysicalDeviceImageRobustnessFeatures, VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT, VkPhysicalDeviceImageViewMinLodFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInheritedViewportScissorFeaturesNV, VkPhysicalDeviceInlineUniformBlockFeatures, VkPhysicalDeviceInvocationMaskFeaturesHUAWEI, VkPhysicalDeviceLegacyDitheringFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceLinearColorAttachmentFeaturesNV, VkPhysicalDeviceMaintenance4Features, VkPhysicalDeviceMemoryDecompressionFeaturesNV, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiDrawFeaturesEXT, VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM, VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT, VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT, VkPhysicalDeviceOpacityMicromapFeaturesEXT, VkPhysicalDeviceOpticalFlowFeaturesNV, VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineCreationCacheControlFeatures, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT, VkPhysicalDevicePipelinePropertiesFeaturesEXT, VkPhysicalDevicePipelineProtectedAccessFeaturesEXT, VkPhysicalDevicePipelineRobustnessFeaturesEXT, VkPhysicalDevicePortabilitySubsetFeaturesKHR, VkPhysicalDevicePresentBarrierFeaturesNV, VkPhysicalDevicePresentIdFeaturesKHR, VkPhysicalDevicePresentWaitFeaturesKHR, VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, VkPhysicalDevicePrivateDataFeatures, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceProvokingVertexFeaturesEXT, VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT, VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, VkPhysicalDeviceRayQueryFeaturesKHR, VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV, VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR, VkPhysicalDeviceRayTracingMotionBlurFeaturesNV, VkPhysicalDeviceRayTracingPipelineFeaturesKHR, VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceRobustness2FeaturesEXT, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT, VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM, VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures, VkPhysicalDeviceShaderDrawParametersFeatures, 
VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerDotProductFeatures, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT, VkPhysicalDeviceShaderObjectFeaturesEXT, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, VkPhysicalDeviceShaderTerminateInvocationFeatures, VkPhysicalDeviceShaderTileImageFeaturesEXT, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeatures, VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT, VkPhysicalDeviceSubpassShadingFeaturesHUAWEI, VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT, VkPhysicalDeviceSynchronization2Features, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeatures, VkPhysicalDeviceTilePropertiesFeaturesQCOM, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkan13Features, VkPhysicalDeviceVulkanMemoryModelFeatures, VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, VkPhysicalDeviceYcbcrImageArraysFeaturesEXT, or VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDeviceDeviceMemoryReportCreateInfoEXT, VkDeviceDiagnosticsConfigCreateInfoNV, VkDeviceGroupDeviceCreateInfo, VkDeviceMemoryOverallocationCreateInfoAMD, VkDevicePrivateDataCreateInfo, VkPhysicalDevice16BitStorageFeatures, VkPhysicalDevice4444FormatsFeaturesEXT, VkPhysicalDevice8BitStorageFeatures, VkPhysicalDeviceASTCDecodeFeaturesEXT, VkPhysicalDeviceAccelerationStructureFeaturesKHR, VkPhysicalDeviceAddressBindingReportFeaturesEXT, VkPhysicalDeviceAmigoProfilingFeaturesSEC, VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, VkPhysicalDeviceBorderColorSwizzleFeaturesEXT, VkPhysicalDeviceBufferDeviceAddressFeatures, VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI, VkPhysicalDeviceCoherentMemoryFeaturesAMD, VkPhysicalDeviceColorWriteEnableFeaturesEXT, VkPhysicalDeviceComputeShaderDerivativesFeaturesNV, VkPhysicalDeviceConditionalRenderingFeaturesEXT, VkPhysicalDeviceCooperativeMatrixFeaturesKHR, VkPhysicalDeviceCooperativeMatrixFeaturesNV, VkPhysicalDeviceCopyMemoryIndirectFeaturesNV, VkPhysicalDeviceCornerSampledImageFeaturesNV, VkPhysicalDeviceCoverageReductionModeFeaturesNV, VkPhysicalDeviceCustomBorderColorFeaturesEXT, VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, VkPhysicalDeviceDepthBiasControlFeaturesEXT, VkPhysicalDeviceDepthClampZeroOneFeaturesEXT, VkPhysicalDeviceDepthClipControlFeaturesEXT, VkPhysicalDeviceDepthClipEnableFeaturesEXT, VkPhysicalDeviceDescriptorBufferFeaturesEXT, VkPhysicalDeviceDescriptorIndexingFeatures, VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, 
VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV, VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, VkPhysicalDeviceDeviceMemoryReportFeaturesEXT, VkPhysicalDeviceDiagnosticsConfigFeaturesNV, VkPhysicalDeviceDisplacementMicromapFeaturesNV, VkPhysicalDeviceDynamicRenderingFeatures, VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT, VkPhysicalDeviceExclusiveScissorFeaturesNV, VkPhysicalDeviceExtendedDynamicState2FeaturesEXT, VkPhysicalDeviceExtendedDynamicState3FeaturesEXT, VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, VkPhysicalDeviceExternalMemoryRDMAFeaturesNV, VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, VkPhysicalDeviceFaultFeaturesEXT, VkPhysicalDeviceFeatures2, VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, VkPhysicalDeviceFragmentDensityMapFeaturesEXT, VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM, VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR, VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV, VkPhysicalDeviceFragmentShadingRateFeaturesKHR, VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR, VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, VkPhysicalDeviceHostImageCopyFeaturesEXT, VkPhysicalDeviceHostQueryResetFeatures, VkPhysicalDeviceImage2DViewOf3DFeaturesEXT, VkPhysicalDeviceImageCompressionControlFeaturesEXT, VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, VkPhysicalDeviceImageProcessingFeaturesQCOM, VkPhysicalDeviceImageRobustnessFeatures, VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT, VkPhysicalDeviceImageViewMinLodFeaturesEXT, VkPhysicalDeviceImagelessFramebufferFeatures, VkPhysicalDeviceIndexTypeUint8FeaturesEXT, VkPhysicalDeviceInheritedViewportScissorFeaturesNV, VkPhysicalDeviceInlineUniformBlockFeatures, VkPhysicalDeviceInvocationMaskFeaturesHUAWEI, VkPhysicalDeviceLegacyDitheringFeaturesEXT, VkPhysicalDeviceLineRasterizationFeaturesEXT, VkPhysicalDeviceLinearColorAttachmentFeaturesNV, VkPhysicalDeviceMaintenance4Features, VkPhysicalDeviceMaintenance5FeaturesKHR, VkPhysicalDeviceMemoryDecompressionFeaturesNV, VkPhysicalDeviceMemoryPriorityFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesEXT, VkPhysicalDeviceMeshShaderFeaturesNV, VkPhysicalDeviceMultiDrawFeaturesEXT, VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, VkPhysicalDeviceMultiviewFeatures, VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM, VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT, VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT, VkPhysicalDeviceOpacityMicromapFeaturesEXT, VkPhysicalDeviceOpticalFlowFeaturesNV, VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, VkPhysicalDevicePerformanceQueryFeaturesKHR, VkPhysicalDevicePipelineCreationCacheControlFeatures, VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT, VkPhysicalDevicePipelinePropertiesFeaturesEXT, VkPhysicalDevicePipelineProtectedAccessFeaturesEXT, VkPhysicalDevicePipelineRobustnessFeaturesEXT, VkPhysicalDevicePortabilitySubsetFeaturesKHR, VkPhysicalDevicePresentBarrierFeaturesNV, VkPhysicalDevicePresentIdFeaturesKHR, VkPhysicalDevicePresentWaitFeaturesKHR, VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, VkPhysicalDevicePrivateDataFeatures, VkPhysicalDeviceProtectedMemoryFeatures, VkPhysicalDeviceProvokingVertexFeaturesEXT, VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT, VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, 
VkPhysicalDeviceRayQueryFeaturesKHR, VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV, VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR, VkPhysicalDeviceRayTracingMotionBlurFeaturesNV, VkPhysicalDeviceRayTracingPipelineFeaturesKHR, VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR, VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV, VkPhysicalDeviceRobustness2FeaturesEXT, VkPhysicalDeviceSamplerYcbcrConversionFeatures, VkPhysicalDeviceScalarBlockLayoutFeatures, VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures, VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT, VkPhysicalDeviceShaderAtomicFloatFeaturesEXT, VkPhysicalDeviceShaderAtomicInt64Features, VkPhysicalDeviceShaderClockFeaturesKHR, VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM, VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures, VkPhysicalDeviceShaderDrawParametersFeatures, VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, VkPhysicalDeviceShaderEnqueueFeaturesAMDX, VkPhysicalDeviceShaderFloat16Int8Features, VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT, VkPhysicalDeviceShaderImageFootprintFeaturesNV, VkPhysicalDeviceShaderIntegerDotProductFeatures, VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT, VkPhysicalDeviceShaderObjectFeaturesEXT, VkPhysicalDeviceShaderSMBuiltinsFeaturesNV, VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures, VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, VkPhysicalDeviceShaderTerminateInvocationFeatures, VkPhysicalDeviceShaderTileImageFeaturesEXT, VkPhysicalDeviceShadingRateImageFeaturesNV, VkPhysicalDeviceSubgroupSizeControlFeatures, VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT, VkPhysicalDeviceSubpassShadingFeaturesHUAWEI, VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT, VkPhysicalDeviceSynchronization2Features, VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT, VkPhysicalDeviceTextureCompressionASTCHDRFeatures, VkPhysicalDeviceTilePropertiesFeaturesQCOM, VkPhysicalDeviceTimelineSemaphoreFeatures, VkPhysicalDeviceTransformFeedbackFeaturesEXT, VkPhysicalDeviceUniformBufferStandardLayoutFeatures, VkPhysicalDeviceVariablePointersFeatures, VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT, VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT, VkPhysicalDeviceVulkan11Features, VkPhysicalDeviceVulkan12Features, VkPhysicalDeviceVulkan13Features, VkPhysicalDeviceVulkanMemoryModelFeatures, VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, VkPhysicalDeviceYcbcrImageArraysFeaturesEXT, or VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures" }, { "vuid": "VUID-VkDeviceCreateInfo-sType-unique", @@ -1196,6 +1196,10 @@ }, "VkCommandBufferBeginInfo": { "core": [ + { + "vuid": "VUID-VkCommandBufferBeginInfo-flags-09123", + "text": " If flags contains VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, the VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, { "vuid": "VUID-VkCommandBufferBeginInfo-flags-00055", "text": " If flags contains VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, the framebuffer member of pInheritanceInfo must be either VK_NULL_HANDLE, or a valid VkFramebuffer that is compatible with the renderPass member of pInheritanceInfo" @@ -7114,6 +7118,34 @@ } ] }, + "vkGetRenderingAreaGranularityKHR": { + "core": [ + { + "vuid": "VUID-vkGetRenderingAreaGranularityKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": 
"VUID-vkGetRenderingAreaGranularityKHR-pRenderingAreaInfo-parameter", + "text": " pRenderingAreaInfo must be a valid pointer to a valid VkRenderingAreaInfoKHR structure" + }, + { + "vuid": "VUID-vkGetRenderingAreaGranularityKHR-pGranularity-parameter", + "text": " pGranularity must be a valid pointer to a VkExtent2D structure" + } + ] + }, + "VkRenderingAreaInfoKHR": { + "core": [ + { + "vuid": "VUID-VkRenderingAreaInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR" + }, + { + "vuid": "VUID-VkRenderingAreaInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + } + ] + }, "vkCmdEndRendering": { "core": [ { @@ -11108,22 +11140,6 @@ "vuid": "VUID-VkComputePipelineCreateInfo-layout-07991", "text": " If a resource variables is declared in a shader as an array, a descriptor slot in layout must match the descriptor count" }, - { - "vuid": "VUID-VkComputePipelineCreateInfo-stage-00701", - "text": " The stage member of stage must be VK_SHADER_STAGE_COMPUTE_BIT" - }, - { - "vuid": "VUID-VkComputePipelineCreateInfo-stage-00702", - "text": " The shader code for the entry point identified by stage and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" - }, - { - "vuid": "VUID-VkComputePipelineCreateInfo-layout-01687", - "text": " The number of resources in layout accessible to the compute shader stage must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" - }, - { - "vuid": "VUID-VkComputePipelineCreateInfo-flags-03364", - "text": " flags must not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" - }, { "vuid": "VUID-VkComputePipelineCreateInfo-flags-03365", "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR" @@ -11168,6 +11184,26 @@ "vuid": "VUID-VkComputePipelineCreateInfo-pipelineCreationCacheControl-02875", "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT" }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-stage-00701", + "text": " The stage member of stage must be VK_SHADER_STAGE_COMPUTE_BIT" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-stage-00702", + "text": " The shader code for the entry point identified by stage and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-layout-01687", + "text": " The number of resources in layout accessible to the compute shader stage must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-shaderEnqueue-09177", + "text": " If shaderEnqueue is not enabled, flags must not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR" + }, + { + "vuid": "VUID-VkComputePipelineCreateInfo-flags-09178", + "text": " If flags does not include VK_PIPELINE_CREATE_LIBRARY_BIT_KHR, the shader specified by stage must not declare the ShaderEnqueueAMDX capability" + }, { "vuid": "VUID-VkComputePipelineCreateInfo-pipelineStageCreationFeedbackCount-06566", "text": " If VkPipelineCreationFeedbackCreateInfo::pipelineStageCreationFeedbackCount is not 0, it must be 1" @@ -11186,7 +11222,7 @@ }, { "vuid": "VUID-VkComputePipelineCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL 
or a pointer to a valid instance of VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfo, VkPipelineRobustnessCreateInfoEXT, or VkSubpassShadingPipelineCreateInfoHUAWEI" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreateFlags2CreateInfoKHR, VkPipelineCreationFeedbackCreateInfo, VkPipelineRobustnessCreateInfoEXT, or VkSubpassShadingPipelineCreateInfoHUAWEI" }, { "vuid": "VUID-VkComputePipelineCreateInfo-sType-unique", @@ -11341,8 +11377,8 @@ "text": " If a shader module identifier is not specified for this stage, module must be a valid VkShaderModule or there must be a valid VkShaderModuleCreateInfo structure in the pNext chain" }, { - "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-06846", - "text": " If a shader module identifier is not specified for this stage, and the graphicsPipelineLibrary feature is not enabled, module must be a valid VkShaderModule" + "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-08771", + "text": " If a shader module identifier is not specified for this stage, and neither the maintenance5 nor graphicsPipelineLibrary feature are enabled, module must be a valid VkShaderModule" }, { "vuid": "VUID-VkPipelineShaderStageCreateInfo-stage-06848", @@ -11358,7 +11394,7 @@ }, { "vuid": "VUID-VkPipelineShaderStageCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDebugUtilsObjectNameInfoEXT, VkPipelineRobustnessCreateInfoEXT, VkPipelineShaderStageModuleIdentifierCreateInfoEXT, VkPipelineShaderStageRequiredSubgroupSizeCreateInfo, VkShaderModuleCreateInfo, or VkShaderModuleValidationCacheCreateInfoEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkDebugUtilsObjectNameInfoEXT, VkPipelineRobustnessCreateInfoEXT, VkPipelineShaderStageModuleIdentifierCreateInfoEXT, VkPipelineShaderStageNodeCreateInfoAMDX, VkPipelineShaderStageRequiredSubgroupSizeCreateInfo, VkShaderModuleCreateInfo, or VkShaderModuleValidationCacheCreateInfoEXT" }, { "vuid": "VUID-VkPipelineShaderStageCreateInfo-sType-unique", @@ -11749,20 +11785,24 @@ "text": " If the pipeline is being created with pre-rasterization shader state and vertex input state and the topology member of pInputAssembly is VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, and either VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY dynamic state is not enabled or dynamicPrimitiveTopologyUnrestricted is VK_FALSE, then pStages must include tessellation shader stages" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-topology-08890", - "text": " If the pipeline is being created with a Vertex {ExecutionModel} and no TessellationEvaluation or Geometry {ExecutionModel}, and the topology member of pInputAssembly is VK_PRIMITIVE_TOPOLOGY_POINT_LIST, and either VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY dynamic state is not enabled or dynamicPrimitiveTopologyUnrestricted is VK_FALSE, a PointSize decorated variable must be written to" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-topology-08808", + "text": " If the pipeline is being created with a Vertex {ExecutionModel} and no TessellationEvaluation or Geometry {ExecutionModel}, and the topology member of pInputAssembly is VK_PRIMITIVE_TOPOLOGY_POINT_LIST,and either VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY dynamic state is not enabled or 
dynamicPrimitiveTopologyUnrestricted is VK_FALSE, a PointSize decorated variable must be written to if maintenance5 is not enabled" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-TessellationEvaluation-07723", - "text": " If the pipeline is being created with a TessellationEvaluation {ExecutionModel}, no Geometry {ExecutionModel}, uses the PointMode {ExecutionMode}, and shaderTessellationAndGeometryPointSize is enabled, a PointSize decorated variable must be written to" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-shaderTessellationAndGeometryPointSize-08774", + "text": " If the pipeline is being created with a TessellationEvaluation {ExecutionModel}, no Geometry {ExecutionModel}, uses the PointMode {ExecutionMode}, and shaderTessellationAndGeometryPointSize is enabled, a PointSize decorated variable must be written to if maintenance5 is not enabled" + }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-maintenance5-08775", + "text": " If maintenance5 is enabled and a PointSize decorated variable is written to, all execution paths must write to a PointSize decorated variable" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-TessellationEvaluation-07724", "text": " If the pipeline is being created with a TessellationEvaluation {ExecutionModel}, no Geometry {ExecutionModel}, uses the PointMode {ExecutionMode}, and shaderTessellationAndGeometryPointSize is not enabled, a PointSize decorated variable must not be written to" }, { - "vuid": "VUID-VkGraphicsPipelineCreateInfo-Geometry-07725", - "text": " If the pipeline is being created with a Geometry {ExecutionModel}, uses the OutputPoints {ExecutionMode}, and shaderTessellationAndGeometryPointSize is enabled, a PointSize decorated variable must be written to for every vertex emitted" + "vuid": "VUID-VkGraphicsPipelineCreateInfo-shaderTessellationAndGeometryPointSize-08776", + "text": " If the pipeline is being created with a Geometry {ExecutionModel}, uses the OutputPoints {ExecutionMode}, and shaderTessellationAndGeometryPointSize is enabled, a PointSize decorated variable must be written to for every vertex emitted if maintenance5 is not enabled" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-Geometry-07726", @@ -12910,7 +12950,7 @@ }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkAttachmentSampleCountInfoAMD, VkGraphicsPipelineLibraryCreateInfoEXT, VkGraphicsPipelineShaderGroupsCreateInfoNV, VkMultiviewPerViewAttributesInfoNVX, VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreationFeedbackCreateInfo, VkPipelineDiscardRectangleStateCreateInfoEXT, VkPipelineFragmentShadingRateEnumStateCreateInfoNV, VkPipelineFragmentShadingRateStateCreateInfoKHR, VkPipelineLibraryCreateInfoKHR, VkPipelineRenderingCreateInfo, VkPipelineRepresentativeFragmentTestStateCreateInfoNV, or VkPipelineRobustnessCreateInfoEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkAttachmentSampleCountInfoAMD, VkGraphicsPipelineLibraryCreateInfoEXT, VkGraphicsPipelineShaderGroupsCreateInfoNV, VkMultiviewPerViewAttributesInfoNVX, VkPipelineCompilerControlCreateInfoAMD, VkPipelineCreateFlags2CreateInfoKHR, VkPipelineCreationFeedbackCreateInfo, VkPipelineDiscardRectangleStateCreateInfoEXT, VkPipelineFragmentShadingRateEnumStateCreateInfoNV, VkPipelineFragmentShadingRateStateCreateInfoKHR, VkPipelineLibraryCreateInfoKHR, 
VkPipelineRenderingCreateInfo, VkPipelineRepresentativeFragmentTestStateCreateInfoNV, or VkPipelineRobustnessCreateInfoEXT" }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-sType-unique", @@ -12938,6 +12978,22 @@ } ] }, + "VkPipelineCreateFlags2CreateInfoKHR": { + "core": [ + { + "vuid": "VUID-VkPipelineCreateFlags2CreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkPipelineCreateFlags2CreateInfoKHR-flags-parameter", + "text": " flags must be a valid combination of VkPipelineCreateFlagBits2KHR values" + }, + { + "vuid": "VUID-VkPipelineCreateFlags2CreateInfoKHR-flags-requiredbitmask", + "text": " flags must not be 0" + } + ] + }, "VkGraphicsPipelineLibraryCreateInfoEXT": { "core": [ { @@ -13298,7 +13354,7 @@ }, { "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkPipelineCreationFeedbackCreateInfo" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCreateFlags2CreateInfoKHR or VkPipelineCreationFeedbackCreateInfo" }, { "vuid": "VUID-VkRayTracingPipelineCreateInfoNV-sType-unique", @@ -13502,7 +13558,7 @@ }, { "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCreationFeedbackCreateInfo or VkPipelineRobustnessCreateInfoEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCreateFlags2CreateInfoKHR, VkPipelineCreationFeedbackCreateInfo, or VkPipelineRobustnessCreateInfoEXT" }, { "vuid": "VUID-VkRayTracingPipelineCreateInfoKHR-sType-unique", @@ -16482,7 +16538,7 @@ }, { "vuid": "VUID-VkBufferCreateInfo-pNext-pNext", - "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkBufferCollectionBufferCreateInfoFUCHSIA, VkBufferDeviceAddressCreateInfoEXT, VkBufferOpaqueCaptureAddressCreateInfo, VkDedicatedAllocationBufferCreateInfoNV, VkExternalMemoryBufferCreateInfo, VkOpaqueCaptureDescriptorDataCreateInfoEXT, or VkVideoProfileListInfoKHR" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkBufferCollectionBufferCreateInfoFUCHSIA, VkBufferDeviceAddressCreateInfoEXT, VkBufferOpaqueCaptureAddressCreateInfo, VkBufferUsageFlags2CreateInfoKHR, VkDedicatedAllocationBufferCreateInfoNV, VkExternalMemoryBufferCreateInfo, VkOpaqueCaptureDescriptorDataCreateInfoEXT, or VkVideoProfileListInfoKHR" }, { "vuid": "VUID-VkBufferCreateInfo-sType-unique", @@ -16506,6 +16562,22 @@ } ] }, + "VkBufferUsageFlags2CreateInfoKHR": { + "core": [ + { + "vuid": "VUID-VkBufferUsageFlags2CreateInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR" + }, + { + "vuid": "VUID-VkBufferUsageFlags2CreateInfoKHR-usage-parameter", + "text": " usage must be a valid combination of VkBufferUsageFlagBits2KHR values" + }, + { + "vuid": "VUID-VkBufferUsageFlags2CreateInfoKHR-usage-requiredbitmask", + "text": " usage must not be 0" + } + ] + }, "VkDedicatedAllocationBufferCreateInfoNV": { "core": [ { @@ -16641,12 +16713,12 @@ "text": " buffer must have been created with a usage value containing 
at least one of VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT or VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT" }, { - "vuid": "VUID-VkBufferViewCreateInfo-buffer-00933", - "text": " If buffer was created with usage containing VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, then format features of format must contain VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT" + "vuid": "VUID-VkBufferViewCreateInfo-format-08778", + "text": " If the buffer view usage contains VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, then format features of format must contain VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT" }, { - "vuid": "VUID-VkBufferViewCreateInfo-buffer-00934", - "text": " If buffer was created with usage containing VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, then format features of format must contain VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT" + "vuid": "VUID-VkBufferViewCreateInfo-format-08779", + "text": " If the buffer view usage contains VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, then format features of format must contain VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT" }, { "vuid": "VUID-VkBufferViewCreateInfo-buffer-00935", @@ -16668,13 +16740,21 @@ "vuid": "VUID-VkBufferViewCreateInfo-pNext-06782", "text": " If the pNext chain includes a VkExportMetalObjectCreateInfoEXT structure, its exportObjectType member must be VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT" }, + { + "vuid": "VUID-VkBufferViewCreateInfo-pNext-08780", + "text": " If the pNext chain includes a VkBufferUsageFlags2CreateInfoKHR, its usage must not contain any other bit than VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR or VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR" + }, + { + "vuid": "VUID-VkBufferViewCreateInfo-pNext-08781", + "text": " If the pNext chain includes a VkBufferUsageFlags2CreateInfoKHR, its usage must be a subset of the VkBufferCreateInfo::usage specified or VkBufferUsageFlags2CreateInfoKHR::usage from VkBufferCreateInfo::pNext when creating buffer" + }, { "vuid": "VUID-VkBufferViewCreateInfo-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO" }, { "vuid": "VUID-VkBufferViewCreateInfo-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkExportMetalObjectCreateInfoEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkBufferUsageFlags2CreateInfoKHR or VkExportMetalObjectCreateInfoEXT" }, { "vuid": "VUID-VkBufferViewCreateInfo-sType-unique", @@ -17566,98 +17646,98 @@ } ] }, - "vkGetImageSubresourceLayout2EXT": { + "vkGetImageSubresourceLayout2KHR": { "core": [ { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-aspectMask-00997", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-aspectMask-00997", "text": " The aspectMask member of pSubresource must only have a single bit set" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-mipLevel-01716", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-mipLevel-01716", "text": " The mipLevel member of pSubresource must be less than the mipLevels specified in VkImageCreateInfo when image was created" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-arrayLayer-01717", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-arrayLayer-01717", "text": " The arrayLayer member of pSubresource must be less than the arrayLayers specified in VkImageCreateInfo when image was created" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-format-08886", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-format-08886", "text": " If format of the image is a color 
format, tiling of the image is VK_IMAGE_TILING_LINEAR or VK_IMAGE_TILING_OPTIMAL, and does not have a multi-planar image format, the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_COLOR_BIT" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-format-04462", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-format-04462", "text": " If format of the image has a depth component, the aspectMask member of pSubresource must contain VK_IMAGE_ASPECT_DEPTH_BIT" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-format-04463", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-format-04463", "text": " If format of the image has a stencil component, the aspectMask member of pSubresource must contain VK_IMAGE_ASPECT_STENCIL_BIT" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-format-04464", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-format-04464", "text": " If format of the image does not contain a stencil or depth component, the aspectMask member of pSubresource must not contain VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-tiling-08717", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-tiling-08717", "text": " If the tiling of the image is VK_IMAGE_TILING_LINEAR and has a multi-planar image format, then the aspectMask member of pSubresource must be a single valid multi-planar aspect mask" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-image-01895", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-image-01895", "text": " If image was created with the VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID external memory handle type, then image must be bound to memory" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-tiling-02271", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-tiling-02271", "text": " If the tiling of the image is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, then the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_MEMORY_PLANE{ibit}BIT_EXT and the index i must be less than the VkDrmFormatModifierPropertiesEXT::drmFormatModifierPlaneCount associated with the image’s format and VkImageDrmFormatModifierPropertiesEXT::drmFormatModifier" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-device-parameter", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-device-parameter", "text": " device must be a valid VkDevice handle" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-image-parameter", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-image-parameter", "text": " image must be a valid VkImage handle" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-pSubresource-parameter", - "text": " pSubresource must be a valid pointer to a valid VkImageSubresource2EXT structure" + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-pSubresource-parameter", + "text": " pSubresource must be a valid pointer to a valid VkImageSubresource2KHR structure" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-pLayout-parameter", - "text": " pLayout must be a valid pointer to a VkSubresourceLayout2EXT structure" + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-pLayout-parameter", + "text": " pLayout must be a valid pointer to a VkSubresourceLayout2KHR structure" }, { - "vuid": "VUID-vkGetImageSubresourceLayout2EXT-image-parent", + "vuid": "VUID-vkGetImageSubresourceLayout2KHR-image-parent", "text": " image must have been created, allocated, or retrieved from device" } ] }, - "VkImageSubresource2EXT": { + "VkImageSubresource2KHR": { "core": [ { - "vuid": "VUID-VkImageSubresource2EXT-sType-sType", - 
"text": " sType must be VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT" + "vuid": "VUID-VkImageSubresource2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR" }, { - "vuid": "VUID-VkImageSubresource2EXT-pNext-pNext", + "vuid": "VUID-VkImageSubresource2KHR-pNext-pNext", "text": " pNext must be NULL" }, { - "vuid": "VUID-VkImageSubresource2EXT-imageSubresource-parameter", + "vuid": "VUID-VkImageSubresource2KHR-imageSubresource-parameter", "text": " imageSubresource must be a valid VkImageSubresource structure" } ] }, - "VkSubresourceLayout2EXT": { + "VkSubresourceLayout2KHR": { "core": [ { - "vuid": "VUID-VkSubresourceLayout2EXT-sType-sType", - "text": " sType must be VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT" + "vuid": "VUID-VkSubresourceLayout2KHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR" }, { - "vuid": "VUID-VkSubresourceLayout2EXT-pNext-pNext", + "vuid": "VUID-VkSubresourceLayout2KHR-pNext-pNext", "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkImageCompressionPropertiesEXT or VkSubresourceHostMemcpySizeEXT" }, { - "vuid": "VUID-VkSubresourceLayout2EXT-sType-unique", + "vuid": "VUID-VkSubresourceLayout2KHR-sType-unique", "text": " The sType value of each struct in the pNext chain must be unique" } ] @@ -17670,6 +17750,82 @@ } ] }, + "vkGetDeviceImageSubresourceLayoutKHR": { + "core": [ + { + "vuid": "VUID-vkGetDeviceImageSubresourceLayoutKHR-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetDeviceImageSubresourceLayoutKHR-pInfo-parameter", + "text": " pInfo must be a valid pointer to a valid VkDeviceImageSubresourceInfoKHR structure" + }, + { + "vuid": "VUID-vkGetDeviceImageSubresourceLayoutKHR-pLayout-parameter", + "text": " pLayout must be a valid pointer to a VkSubresourceLayout2KHR structure" + } + ] + }, + "VkDeviceImageSubresourceInfoKHR": { + "core": [ + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-aspectMask-00997", + "text": " The aspectMask member of pSubresource must only have a single bit set" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-mipLevel-01716", + "text": " The mipLevel member of pSubresource must be less than the mipLevels specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-arrayLayer-01717", + "text": " The arrayLayer member of pSubresource must be less than the arrayLayers specified in VkImageCreateInfo when image was created" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-format-08886", + "text": " If format of the image is a color format, tiling of the image is VK_IMAGE_TILING_LINEAR or VK_IMAGE_TILING_OPTIMAL, and does not have a multi-planar image format, the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_COLOR_BIT" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-format-04462", + "text": " If format of the image has a depth component, the aspectMask member of pSubresource must contain VK_IMAGE_ASPECT_DEPTH_BIT" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-format-04463", + "text": " If format of the image has a stencil component, the aspectMask member of pSubresource must contain VK_IMAGE_ASPECT_STENCIL_BIT" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-format-04464", + "text": " If format of the image does not contain a stencil or depth component, the aspectMask member of pSubresource must not contain 
VK_IMAGE_ASPECT_DEPTH_BIT or VK_IMAGE_ASPECT_STENCIL_BIT" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-tiling-08717", + "text": " If the tiling of the image is VK_IMAGE_TILING_LINEAR and has a multi-planar image format, then the aspectMask member of pSubresource must be a single valid multi-planar aspect mask" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-image-01895", + "text": " If image was created with the VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID external memory handle type, then image must be bound to memory" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-tiling-02271", + "text": " If the tiling of the image is VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, then the aspectMask member of pSubresource must be VK_IMAGE_ASPECT_MEMORY_PLANE{ibit}BIT_EXT and the index i must be less than the VkDrmFormatModifierPropertiesEXT::drmFormatModifierPlaneCount associated with the image’s format and VkImageDrmFormatModifierPropertiesEXT::drmFormatModifier" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-pNext-pNext", + "text": " pNext must be NULL" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkImageCreateInfo structure" + }, + { + "vuid": "VUID-VkDeviceImageSubresourceInfoKHR-pSubresource-parameter", + "text": " pSubresource must be a valid pointer to a valid VkImageSubresource2KHR structure" + } + ] + }, "vkGetImageDrmFormatModifierPropertiesEXT": { "core": [ { @@ -17744,6 +17900,10 @@ }, "vkCreateImageView": { "core": [ + { + "vuid": "VUID-vkCreateImageView-image-09179", + "text": " VkImageViewCreateInfo::image must have been created from device" + }, { "vuid": "VUID-vkCreateImageView-device-parameter", "text": " device must be a valid VkDevice handle" @@ -19160,6 +19320,10 @@ }, "vkGetMicromapBuildSizesEXT": { "core": [ + { + "vuid": "VUID-vkGetMicromapBuildSizesEXT-dstMicromap-09180", + "text": " VkMicromapBuildInfoEXT::dstMicromap must have been created from device" + }, { "vuid": "VUID-vkGetMicromapBuildSizesEXT-micromap-07439", "text": " The micromap feature must be enabled" @@ -20896,6 +21060,10 @@ "vuid": "VUID-VkSamplerCustomBorderColorCreateInfoEXT-format-04015", "text": " If the sampler is used to sample an image view of VK_FORMAT_B4G4R4A4_UNORM_PACK16, VK_FORMAT_B5G6R5_UNORM_PACK16, or VK_FORMAT_B5G5R5A1_UNORM_PACK16 format then format must not be VK_FORMAT_UNDEFINED" }, + { + "vuid": "VUID-VkSamplerCustomBorderColorCreateInfoEXT-format-08807", + "text": " If the sampler is used to sample an image view of VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR format then format must not be VK_FORMAT_UNDEFINED" + }, { "vuid": "VUID-VkSamplerCustomBorderColorCreateInfoEXT-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT" @@ -22021,12 +22189,12 @@ "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_BUFFER or VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, the range member of each element of pBufferInfo, or the effective range if range is VK_WHOLE_SIZE, must be less than or equal to VkPhysicalDeviceLimits::maxStorageBufferRange" }, { - "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00334", - "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, the VkBuffer that each element of pTexelBufferView was created from must have been created 
with VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT set" + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-08765", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, the pTexelBufferView buffer view usage must include VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT" }, { - "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00335", - "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, the VkBuffer that each element of pTexelBufferView was created from must have been created with VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT set" + "vuid": "VUID-VkWriteDescriptorSet-descriptorType-08766", + "text": " If descriptorType is VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, the pTexelBufferView buffer view usage must include VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT" }, { "vuid": "VUID-VkWriteDescriptorSet-descriptorType-00336", @@ -23210,7 +23378,7 @@ }, { "vuid": "VUID-VkDescriptorBufferBindingInfoEXT-pNext-pNext", - "text": " pNext must be NULL or a pointer to a valid instance of VkDescriptorBufferBindingPushDescriptorBufferHandleEXT" + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkBufferUsageFlags2CreateInfoKHR or VkDescriptorBufferBindingPushDescriptorBufferHandleEXT" }, { "vuid": "VUID-VkDescriptorBufferBindingInfoEXT-sType-unique", @@ -25510,6 +25678,34 @@ } ] }, + "CoalescedInputCountAMDX": { + "core": [ + { + "vuid": "VUID-CoalescedInputCountAMDX-CoalescedInputCountAMDX-09172", + "text": " The variable decorated with CoalescedInputCountAMDX must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-CoalescedInputCountAMDX-CoalescedInputCountAMDX-09173", + "text": " If a variable is decorated with CoalescedInputCountAMDX, the CoalescingAMDX execution mode must be declared" + }, + { + "vuid": "VUID-CoalescedInputCountAMDX-CoalescedInputCountAMDX-09174", + "text": " The variable decorated with CoalescedInputCountAMDX must be declared as a scalar 32-bit integer value" + } + ] + }, + "ShaderIndexAMDX": { + "core": [ + { + "vuid": "VUID-ShaderIndexAMDX-ShaderIndexAMDX-09175", + "text": " The variable decorated with ShaderIndexAMDX must be declared using the Input {StorageClass}" + }, + { + "vuid": "VUID-ShaderIndexAMDX-ShaderIndexAMDX-09176", + "text": " The variable decorated with ShaderIndexAMDX must be declared as a scalar 32-bit integer value" + } + ] + }, "vkCreateQueryPool": { "core": [ { @@ -25754,7 +25950,7 @@ "core": [ { "vuid": "VUID-vkCmdBeginQuery-None-00807", - "text": " All queries used by the command must be unavailable" + "text": " All queries used by the command must be unavailable" }, { "vuid": "VUID-vkCmdBeginQuery-queryType-02804", @@ -25906,7 +26102,7 @@ "core": [ { "vuid": "VUID-vkCmdBeginQueryIndexedEXT-None-00807", - "text": " All queries used by the command must be unavailable" + "text": " All queries used by the command must be unavailable" }, { "vuid": "VUID-vkCmdBeginQueryIndexedEXT-queryType-02804", @@ -26444,10 +26640,6 @@ "vuid": "VUID-vkCmdWriteTimestamp2-queryPool-03861", "text": " queryPool must have been created with a queryType of VK_QUERY_TYPE_TIMESTAMP" }, - { - "vuid": "VUID-vkCmdWriteTimestamp2-queryPool-03862", - "text": " The query identified by queryPool and query must be unavailable" - }, { "vuid": "VUID-vkCmdWriteTimestamp2-timestampValidBits-03863", "text": " The command pool’s queue family must support a non-zero timestampValidBits" @@ -26458,7 +26650,7 @@ }, { "vuid": "VUID-vkCmdWriteTimestamp2-None-03864", - "text": " 
All queries used by the command must be unavailable" + "text": " All queries used by the command must be unavailable" }, { "vuid": "VUID-vkCmdWriteTimestamp2-query-03865", @@ -26540,10 +26732,6 @@ "vuid": "VUID-vkCmdWriteTimestamp-queryPool-01416", "text": " queryPool must have been created with a queryType of VK_QUERY_TYPE_TIMESTAMP" }, - { - "vuid": "VUID-vkCmdWriteTimestamp-queryPool-00828", - "text": " The query identified by queryPool and query must be unavailable" - }, { "vuid": "VUID-vkCmdWriteTimestamp-timestampValidBits-00829", "text": " The command pool’s queue family must support a non-zero timestampValidBits" @@ -26554,7 +26742,7 @@ }, { "vuid": "VUID-vkCmdWriteTimestamp-None-00830", - "text": " All queries used by the command must be unavailable" + "text": " All queries used by the command must be unavailable" }, { "vuid": "VUID-vkCmdWriteTimestamp-query-00831", @@ -27757,12 +27945,20 @@ "text": " If the VK_KHR_maintenance1 extension is not enabled, VkPhysicalDeviceProperties::apiVersion is less than Vulkan 1.1, and srcImage or dstImage is of type VK_IMAGE_TYPE_2D, then for each element of pRegions, extent.depth must be 1" }, { - "vuid": "VUID-vkCmdCopyImage-srcImage-07743", - "text": " If srcImage and dstImage have a different VkImageType, one must be VK_IMAGE_TYPE_3D and the other must be VK_IMAGE_TYPE_2D" + "vuid": "VUID-vkCmdCopyImage-maintenance5-08791", + "text": " If maintenance5 is not enabled, and srcImage and dstImage have a different VkImageType, one must be VK_IMAGE_TYPE_3D and the other must be VK_IMAGE_TYPE_2D" }, { - "vuid": "VUID-vkCmdCopyImage-srcImage-07744", - "text": " If srcImage and dstImage have the same VkImageType, the layerCount member of srcSubresource and dstSubresource in each element of pRegions must match" + "vuid": "VUID-vkCmdCopyImage-maintenance5-08792", + "text": " If the maintenance5 feature is not enabled, the layerCount member of srcSubresource or dstSubresource must not be VK_REMAINING_ARRAY_LAYERS" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-08793", + "text": " If srcImage and dstImage have the same VkImageType, and neither of the layerCount members of srcSubresource or dstSubresource are VK_REMAINING_ARRAY_LAYERS, the layerCount members must match" + }, + { + "vuid": "VUID-vkCmdCopyImage-srcImage-08794", + "text": " If srcImage and dstImage have the same VkImageType, and one of the layerCount members of srcSubresource or dstSubresource is VK_REMAINING_ARRAY_LAYERS, the other member must be either VK_REMAINING_ARRAY_LAYERS or equal to the arrayLayers member of the VkImageCreateInfo used to create the image minus baseArrayLayer" }, { "vuid": "VUID-vkCmdCopyImage-srcImage-01790", @@ -27865,8 +28061,8 @@ "text": " The pname:srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when pname:srcImage was created" }, { - "vuid": "VUID-vkCmdCopyImage-srcSubresource-07968", - "text": " The pname:srcSubresource.baseArrayLayer + pname:srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:srcImage was created" + "vuid": "VUID-vkCmdCopyImage-srcSubresource-08790", + "text": " If pname:srcSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the pname:srcSubresource.baseArrayLayer + pname:srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:srcImage was created" }, { "vuid": 
"VUID-vkCmdCopyImage-srcImage-07969", @@ -27881,8 +28077,8 @@ "text": " The pname:dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when pname:dstImage was created" }, { - "vuid": "VUID-vkCmdCopyImage-dstSubresource-07968", - "text": " The pname:dstSubresource.baseArrayLayer + pname:dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:dstImage was created" + "vuid": "VUID-vkCmdCopyImage-dstSubresource-08790", + "text": " If pname:dstSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the pname:dstSubresource.baseArrayLayer + pname:dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:dstImage was created" }, { "vuid": "VUID-vkCmdCopyImage-dstImage-07969", @@ -27985,8 +28181,12 @@ "text": " aspectMask must not include VK_IMAGE_ASPECT_MEMORY_PLANE{ibit}BIT_EXT for any index i" }, { - "vuid": "VUID-VkImageSubresourceLayers-layerCount-01700", - "text": " layerCount must be greater than 0" + "vuid": "VUID-VkImageSubresourceLayers-maintenance5-08762", + "text": " If maintenance5 is not enabled, layerCount must not be VK_REMAINING_ARRAY_LAYERS" + }, + { + "vuid": "VUID-VkImageSubresourceLayers-layerCount-08763", + "text": " If layerCount is not VK_REMAINING_ARRAY_LAYERS, it must be greater than 0" }, { "vuid": "VUID-VkImageSubresourceLayers-aspectMask-parameter", @@ -28169,12 +28369,20 @@ "text": " If the VK_KHR_maintenance1 extension is not enabled, VkPhysicalDeviceProperties::apiVersion is less than Vulkan 1.1, and srcImage or dstImage is of type VK_IMAGE_TYPE_2D, then for each element of pRegions, extent.depth must be 1" }, { - "vuid": "VUID-VkCopyImageInfo2-srcImage-07743", - "text": " If srcImage and dstImage have a different VkImageType, one must be VK_IMAGE_TYPE_3D and the other must be VK_IMAGE_TYPE_2D" + "vuid": "VUID-VkCopyImageInfo2-maintenance5-08791", + "text": " If maintenance5 is not enabled, and srcImage and dstImage have a different VkImageType, one must be VK_IMAGE_TYPE_3D and the other must be VK_IMAGE_TYPE_2D" }, { - "vuid": "VUID-VkCopyImageInfo2-srcImage-07744", - "text": " If srcImage and dstImage have the same VkImageType, the layerCount member of srcSubresource and dstSubresource in each element of pRegions must match" + "vuid": "VUID-VkCopyImageInfo2-maintenance5-08792", + "text": " If the maintenance5 feature is not enabled, the layerCount member of srcSubresource or dstSubresource must not be VK_REMAINING_ARRAY_LAYERS" + }, + { + "vuid": "VUID-VkCopyImageInfo2-srcImage-08793", + "text": " If srcImage and dstImage have the same VkImageType, and neither of the layerCount members of srcSubresource or dstSubresource are VK_REMAINING_ARRAY_LAYERS, the layerCount members must match" + }, + { + "vuid": "VUID-VkCopyImageInfo2-srcImage-08794", + "text": " If srcImage and dstImage have the same VkImageType, and one of the layerCount members of srcSubresource or dstSubresource is VK_REMAINING_ARRAY_LAYERS, the other member must be either VK_REMAINING_ARRAY_LAYERS or equal to the arrayLayers member of the VkImageCreateInfo used to create the image minus baseArrayLayer" }, { "vuid": "VUID-VkCopyImageInfo2-srcImage-01790", @@ -28277,8 +28485,8 @@ "text": " The pname:srcSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when pname:srcImage was created" }, { - "vuid": 
"VUID-VkCopyImageInfo2-srcSubresource-07968", - "text": " The pname:srcSubresource.baseArrayLayer + pname:srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:srcImage was created" + "vuid": "VUID-VkCopyImageInfo2-srcSubresource-08790", + "text": " If pname:srcSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the pname:srcSubresource.baseArrayLayer + pname:srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:srcImage was created" }, { "vuid": "VUID-VkCopyImageInfo2-srcImage-07969", @@ -28293,8 +28501,8 @@ "text": " The pname:dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when pname:dstImage was created" }, { - "vuid": "VUID-VkCopyImageInfo2-dstSubresource-07968", - "text": " The pname:dstSubresource.baseArrayLayer + pname:dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:dstImage was created" + "vuid": "VUID-VkCopyImageInfo2-dstSubresource-08790", + "text": " If pname:dstSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the pname:dstSubresource.baseArrayLayer + pname:dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:dstImage was created" }, { "vuid": "VUID-VkCopyImageInfo2-dstImage-07969", @@ -28389,8 +28597,8 @@ "text": " The pname:imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when pname:dstImage was created" }, { - "vuid": "VUID-vkCmdCopyBufferToImage-imageSubresource-07968", - "text": " The pname:imageSubresource.baseArrayLayer + pname:imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:dstImage was created" + "vuid": "VUID-vkCmdCopyBufferToImage-imageSubresource-08790", + "text": " If pname:imageSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the pname:imageSubresource.baseArrayLayer + pname:imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:dstImage was created" }, { "vuid": "VUID-vkCmdCopyBufferToImage-dstImage-07969", @@ -28605,8 +28813,8 @@ "text": " The pname:imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when pname:srcImage was created" }, { - "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-07968", - "text": " The pname:imageSubresource.baseArrayLayer + pname:imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:srcImage was created" + "vuid": "VUID-vkCmdCopyImageToBuffer-imageSubresource-08790", + "text": " If pname:imageSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the pname:imageSubresource.baseArrayLayer + pname:imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:srcImage was created" }, { "vuid": "VUID-vkCmdCopyImageToBuffer-srcImage-07969", @@ -28953,8 +29161,8 @@ "text": " The pname:imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in 
VkImageCreateInfo when pname:dstImage was created" }, { - "vuid": "VUID-VkCopyBufferToImageInfo2-imageSubresource-07968", - "text": " The pname:imageSubresource.baseArrayLayer + pname:imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:dstImage was created" + "vuid": "VUID-VkCopyBufferToImageInfo2-imageSubresource-08790", + "text": " If pname:imageSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the pname:imageSubresource.baseArrayLayer + pname:imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:dstImage was created" }, { "vuid": "VUID-VkCopyBufferToImageInfo2-dstImage-07969", @@ -29193,8 +29401,8 @@ "text": " The pname:imageSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when pname:srcImage was created" }, { - "vuid": "VUID-VkCopyImageToBufferInfo2-imageSubresource-07968", - "text": " The pname:imageSubresource.baseArrayLayer + pname:imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:srcImage was created" + "vuid": "VUID-VkCopyImageToBufferInfo2-imageSubresource-08790", + "text": " If pname:imageSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the pname:imageSubresource.baseArrayLayer + pname:imageSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when pname:srcImage was created" }, { "vuid": "VUID-VkCopyImageToBufferInfo2-srcImage-07969", @@ -29477,8 +29685,8 @@ "text": " The specified mipLevel of each region must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" }, { - "vuid": "VUID-vkCmdCopyMemoryToImageIndirectNV-baseArrayLayer-07671", - "text": " The specified baseArrayLayer + layerCount of each region must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + "vuid": "VUID-vkCmdCopyMemoryToImageIndirectNV-layerCount-08764", + "text": " If layerCount is not VK_REMAINING_ARRAY_LAYERS, the specified baseArrayLayer + layerCount of each region must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" }, { "vuid": "VUID-vkCmdCopyMemoryToImageIndirectNV-imageOffset-07672", @@ -29693,12 +29901,12 @@ "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" }, { - "vuid": "VUID-vkCmdBlitImage-srcSubresource-01707", - "text": " The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + "vuid": "VUID-vkCmdBlitImage-srcSubresource-08788", + "text": " If srcSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" }, { - "vuid": "VUID-vkCmdBlitImage-dstSubresource-01708", - "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + "vuid": 
"VUID-vkCmdBlitImage-dstSubresource-08789", + "text": " If dstSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" }, { "vuid": "VUID-vkCmdBlitImage-dstImage-02545", @@ -29817,8 +30025,16 @@ "text": " The aspectMask member of srcSubresource and dstSubresource must match" }, { - "vuid": "VUID-VkImageBlit-layerCount-00239", - "text": " The layerCount member of srcSubresource and dstSubresource must match" + "vuid": "VUID-VkImageBlit-maintenance5-08799", + "text": " If the maintenance5 feature is not enabled, the layerCount member of srcSubresource or dstSubresource must not be VK_REMAINING_ARRAY_LAYERS" + }, + { + "vuid": "VUID-VkImageBlit-layerCount-08800", + "text": " If neither of the layerCount members of srcSubresource or dstSubresource are VK_REMAINING_ARRAY_LAYERS, the layerCount members must match" + }, + { + "vuid": "VUID-VkImageBlit-layerCount-08801", + "text": " If one of the layerCount members of srcSubresource or dstSubresource is VK_REMAINING_ARRAY_LAYERS, the other member must be either VK_REMAINING_ARRAY_LAYERS or equal to the arrayLayers member of the VkImageCreateInfo used to create the image minus baseArrayLayer" }, { "vuid": "VUID-VkImageBlit-srcSubresource-parameter", @@ -29977,12 +30193,12 @@ "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" }, { - "vuid": "VUID-VkBlitImageInfo2-srcSubresource-01707", - "text": " The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + "vuid": "VUID-VkBlitImageInfo2-srcSubresource-08788", + "text": " If srcSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" }, { - "vuid": "VUID-VkBlitImageInfo2-dstSubresource-01708", - "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + "vuid": "VUID-VkBlitImageInfo2-dstSubresource-08789", + "text": " If dstSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" }, { "vuid": "VUID-VkBlitImageInfo2-dstImage-02545", @@ -30101,8 +30317,16 @@ "text": " The aspectMask member of srcSubresource and dstSubresource must match" }, { - "vuid": "VUID-VkImageBlit2-layerCount-00239", - "text": " The layerCount member of srcSubresource and dstSubresource must match" + "vuid": "VUID-VkImageBlit2-maintenance5-08799", + "text": " If the maintenance5 feature is not enabled, the layerCount member of srcSubresource or dstSubresource must not be VK_REMAINING_ARRAY_LAYERS" + }, + { + "vuid": "VUID-VkImageBlit2-layerCount-08800", + "text": " If neither of the layerCount members of srcSubresource or dstSubresource are VK_REMAINING_ARRAY_LAYERS, the layerCount members must match" + }, + { + "vuid": "VUID-VkImageBlit2-layerCount-08801", + "text": " If 
one of the layerCount members of srcSubresource or dstSubresource is VK_REMAINING_ARRAY_LAYERS, the other member must be either VK_REMAINING_ARRAY_LAYERS or equal to the arrayLayers member of the VkImageCreateInfo used to create the image minus baseArrayLayer" }, { "vuid": "VUID-VkImageBlit2-sType-sType", @@ -30197,12 +30421,12 @@ "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" }, { - "vuid": "VUID-vkCmdResolveImage-srcSubresource-01711", - "text": " The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + "vuid": "VUID-vkCmdResolveImage-srcSubresource-08805", + "text": " If srcSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" }, { - "vuid": "VUID-vkCmdResolveImage-dstSubresource-01712", - "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + "vuid": "VUID-vkCmdResolveImage-dstSubresource-08806", + "text": " If dstSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" }, { "vuid": "VUID-vkCmdResolveImage-dstImage-02546", @@ -30329,8 +30553,16 @@ "text": " The aspectMask member of srcSubresource and dstSubresource must only contain VK_IMAGE_ASPECT_COLOR_BIT" }, { - "vuid": "VUID-VkImageResolve-layerCount-00267", - "text": " The layerCount member of srcSubresource and dstSubresource must match" + "vuid": "VUID-VkImageResolve-maintenance5-08802", + "text": " If the maintenance5 feature is not enabled, the layerCount member of srcSubresource or dstSubresource must not be VK_REMAINING_ARRAY_LAYERS" + }, + { + "vuid": "VUID-VkImageResolve-layerCount-08803", + "text": " If neither of the layerCount members of srcSubresource or dstSubresource are VK_REMAINING_ARRAY_LAYERS, the layerCount members must match" + }, + { + "vuid": "VUID-VkImageResolve-layerCount-08804", + "text": " If one of the layerCount members of srcSubresource or dstSubresource is VK_REMAINING_ARRAY_LAYERS, the other member must be either VK_REMAINING_ARRAY_LAYERS or equal to the arrayLayers member of the VkImageCreateInfo used to create the image minus baseArrayLayer" }, { "vuid": "VUID-VkImageResolve-srcSubresource-parameter", @@ -30441,12 +30673,12 @@ "text": " The dstSubresource.mipLevel member of each element of pRegions must be less than the mipLevels specified in VkImageCreateInfo when dstImage was created" }, { - "vuid": "VUID-VkResolveImageInfo2-srcSubresource-01711", - "text": " The srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when srcImage was created" + "vuid": "VUID-VkResolveImageInfo2-srcSubresource-08805", + "text": " If srcSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the srcSubresource.baseArrayLayer + srcSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers 
specified in VkImageCreateInfo when srcImage was created" }, { - "vuid": "VUID-VkResolveImageInfo2-dstSubresource-01712", - "text": " The dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" + "vuid": "VUID-VkResolveImageInfo2-dstSubresource-08806", + "text": " If dstSubresource.layerCount is not VK_REMAINING_ARRAY_LAYERS, the dstSubresource.baseArrayLayer + dstSubresource.layerCount of each element of pRegions must be less than or equal to the arrayLayers specified in VkImageCreateInfo when dstImage was created" }, { "vuid": "VUID-VkResolveImageInfo2-dstImage-02546", @@ -30561,8 +30793,16 @@ "text": " The aspectMask member of srcSubresource and dstSubresource must only contain VK_IMAGE_ASPECT_COLOR_BIT" }, { - "vuid": "VUID-VkImageResolve2-layerCount-00267", - "text": " The layerCount member of srcSubresource and dstSubresource must match" + "vuid": "VUID-VkImageResolve2-maintenance5-08802", + "text": " If the maintenance5 feature is not enabled, the layerCount member of srcSubresource or dstSubresource must not be VK_REMAINING_ARRAY_LAYERS" + }, + { + "vuid": "VUID-VkImageResolve2-layerCount-08803", + "text": " If neither of the layerCount members of srcSubresource or dstSubresource are VK_REMAINING_ARRAY_LAYERS, the layerCount members must match" + }, + { + "vuid": "VUID-VkImageResolve2-layerCount-08804", + "text": " If one of the layerCount members of srcSubresource or dstSubresource is VK_REMAINING_ARRAY_LAYERS, the other member must be either VK_REMAINING_ARRAY_LAYERS or equal to the arrayLayers member of the VkImageCreateInfo used to create the image minus baseArrayLayer" }, { "vuid": "VUID-VkImageResolve2-sType-sType", @@ -30873,27 +31113,27 @@ "vkCmdBindIndexBuffer": { "core": [ { - "vuid": "VUID-vkCmdBindIndexBuffer-offset-00431", + "vuid": "VUID-vkCmdBindIndexBuffer-offset-08782", "text": " offset must be less than the size of buffer" }, { - "vuid": "VUID-vkCmdBindIndexBuffer-offset-00432", - "text": " The sum of offset and the address of the range of VkDeviceMemory object that is backing buffer, must be a multiple of the type indicated by indexType" + "vuid": "VUID-vkCmdBindIndexBuffer-offset-08783", + "text": " The sum of offset and the base address of the range of VkDeviceMemory object that is backing buffer, must be a multiple of the size of the type indicated by indexType" }, { - "vuid": "VUID-vkCmdBindIndexBuffer-buffer-00433", + "vuid": "VUID-vkCmdBindIndexBuffer-buffer-08784", "text": " buffer must have been created with the VK_BUFFER_USAGE_INDEX_BUFFER_BIT flag" }, { - "vuid": "VUID-vkCmdBindIndexBuffer-buffer-00434", + "vuid": "VUID-vkCmdBindIndexBuffer-buffer-08785", "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" }, { - "vuid": "VUID-vkCmdBindIndexBuffer-indexType-02507", + "vuid": "VUID-vkCmdBindIndexBuffer-indexType-08786", "text": " indexType must not be VK_INDEX_TYPE_NONE_KHR" }, { - "vuid": "VUID-vkCmdBindIndexBuffer-indexType-02765", + "vuid": "VUID-vkCmdBindIndexBuffer-indexType-08787", "text": " If indexType is VK_INDEX_TYPE_UINT8_EXT, the indexTypeUint8 feature must be enabled" }, { @@ -30926,6 +31166,70 @@ } ] }, + "vkCmdBindIndexBuffer2KHR": { + "core": [ + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-offset-08782", + "text": " offset must be less than the size of buffer" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-offset-08783", + "text": " The 
sum of offset and the base address of the range of VkDeviceMemory object that is backing buffer, must be a multiple of the size of the type indicated by indexType" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-buffer-08784", + "text": " buffer must have been created with the VK_BUFFER_USAGE_INDEX_BUFFER_BIT flag" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-buffer-08785", + "text": " If buffer is non-sparse then it must be bound completely and contiguously to a single VkDeviceMemory object" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-indexType-08786", + "text": " indexType must not be VK_INDEX_TYPE_NONE_KHR" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-indexType-08787", + "text": " If indexType is VK_INDEX_TYPE_UINT8_EXT, the indexTypeUint8 feature must be enabled" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-size-08767", + "text": " If size is not VK_WHOLE_SIZE, size must be a multiple of the size of the type indicated by indexType" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-size-08768", + "text": " If size is not VK_WHOLE_SIZE, the sum of offset and size must be less than or equal to the size of buffer" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-buffer-parameter", + "text": " buffer must be a valid VkBuffer handle" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-indexType-parameter", + "text": " indexType must be a valid VkIndexType value" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics operations" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-videocoding", + "text": " This command must only be called outside of a video coding scope" + }, + { + "vuid": "VUID-vkCmdBindIndexBuffer2KHR-commonparent", + "text": " Both of buffer, and commandBuffer must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, "vkCmdDraw": { "core": [ { @@ -31085,8 +31389,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDraw-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDraw-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDraw-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDraw-OpImageWrite-04469", @@ -32397,8 +32705,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": 
"VUID-vkCmdDrawIndexed-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawIndexed-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawIndexed-OpImageWrite-04469", @@ -33536,6 +33848,10 @@ "vuid": "VUID-vkCmdDrawIndexed-robustBufferAccess2-07825", "text": " If robustBufferAccess2 is not enabled, (indexSize {times} (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer" }, + { + "vuid": "VUID-vkCmdDrawIndexed-robustBufferAccess2-08798", + "text": " If robustBufferAccess2 is not enabled, (indexSize {times} (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer or vkCmdBindIndexBuffer2KHR. If vkCmdBindIndexBuffer2KHR is used to bind the index buffer, the size of the bound index buffer is vkCmdBindIndexBuffer2KHR::size" + }, { "vuid": "VUID-vkCmdDrawIndexed-commandBuffer-parameter", "text": " commandBuffer must be a valid VkCommandBuffer handle" @@ -33717,8 +34033,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawMultiEXT-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawMultiEXT-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawMultiEXT-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawMultiEXT-OpImageWrite-04469", @@ -35045,8 +35365,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as 
the image view’s format" + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-OpImageWrite-04469", @@ -36184,6 +36508,10 @@ "vuid": "VUID-vkCmdDrawMultiIndexedEXT-robustBufferAccess2-07825", "text": " If robustBufferAccess2 is not enabled, (indexSize {times} (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer" }, + { + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-robustBufferAccess2-08798", + "text": " If robustBufferAccess2 is not enabled, (indexSize {times} (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer or vkCmdBindIndexBuffer2KHR. If vkCmdBindIndexBuffer2KHR is used to bind the index buffer, the size of the bound index buffer is vkCmdBindIndexBuffer2KHR::size" + }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-04937", "text": " The multiDraw feature must be enabled" @@ -36385,8 +36713,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawIndirect-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawIndirect-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawIndirect-OpImageWrite-04469", @@ -37741,8 +38073,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawIndirectCount-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawIndirectCount-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite 
as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawIndirectCount-OpImageWrite-04469", @@ -39109,8 +39445,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawIndexedIndirect-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawIndexedIndirect-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-OpImageWrite-04469", @@ -40304,6 +40644,10 @@ }, "VkDrawIndexedIndirectCommand": { "core": [ + { + "vuid": "VUID-VkDrawIndexedIndirectCommand-robustBufferAccess2-08798", + "text": " If robustBufferAccess2 is not enabled, (indexSize {times} (firstIndex + indexCount) + offset) must be less than or equal to the size of the bound index buffer, with indexSize being based on the type specified by indexType, where the index buffer, indexType, and offset are specified via vkCmdBindIndexBuffer or vkCmdBindIndexBuffer2KHR. 
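The vkCmdBindIndexBuffer2KHR requirements quoted above (offset-08782/08783, size-08767/08768) and the draw-time 08798 bound are easier to read against a concrete call. The following is a minimal sketch, not part of the headers in this repository: it assumes a device created with VK_KHR_maintenance5 enabled, a header version that ships PFN_vkCmdBindIndexBuffer2KHR, and valid handles supplied by the caller; the helper name bindIndexRange and the choice of VK_INDEX_TYPE_UINT32 are illustrative only.

#include <vulkan/vulkan.h>

// Sketch only: device/commandBuffer/indexBuffer are assumed valid, and the
// VK_KHR_maintenance5 extension is assumed to be enabled on the device.
void bindIndexRange(VkDevice device, VkCommandBuffer commandBuffer,
                    VkBuffer indexBuffer, VkDeviceSize offset, VkDeviceSize rangeSize)
{
    // Extension device commands are not exported by the loader, so fetch the
    // entry point through vkGetDeviceProcAddr.
    auto pfnBindIndexBuffer2 = reinterpret_cast<PFN_vkCmdBindIndexBuffer2KHR>(
        vkGetDeviceProcAddr(device, "vkCmdBindIndexBuffer2KHR"));

    // Per the VUIDs above: offset must be less than the buffer size and the
    // bound address must be aligned to the index size (offset-08782/08783);
    // if rangeSize is not VK_WHOLE_SIZE it must be a multiple of the index
    // size and offset + rangeSize must not exceed the buffer size
    // (size-08767/08768).
    pfnBindIndexBuffer2(commandBuffer, indexBuffer, offset, rangeSize,
                        VK_INDEX_TYPE_UINT32);
}

Binding with an explicit size means that, when robustBufferAccess2 is not enabled, subsequent indexed draws are validated against that bound range (vkCmdBindIndexBuffer2KHR::size) rather than the full buffer size, which is what the new 08798-style wording above describes.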
If vkCmdBindIndexBuffer2KHR is used to bind the index buffer, the size of the bound index buffer is vkCmdBindIndexBuffer2KHR::size" + }, { "vuid": "VUID-VkDrawIndexedIndirectCommand-None-00552", "text": " For a given vertex buffer binding, any attribute data fetched must be entirely contained within the corresponding vertex buffer binding, as described in Vertex Input Description" @@ -40473,8 +40817,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-OpImageWrite-04469", @@ -41849,8 +42197,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-OpImageWrite-04469", @@ -43281,8 +43633,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawMeshTasksNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawMeshTasksNV-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": 
"VUID-vkCmdDrawMeshTasksNV-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawMeshTasksNV-OpImageWrite-04469", @@ -44525,8 +44881,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectNV-OpImageWrite-04469", @@ -45817,8 +46177,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountNV-OpImageWrite-04469", @@ -47125,8 +47489,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawMeshTasksEXT-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawMeshTasksEXT-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksEXT-OpImageWrite-08796", + 
"text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawMeshTasksEXT-OpImageWrite-04469", @@ -48397,8 +48765,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectEXT-OpImageWrite-04469", @@ -49717,8 +50089,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawMeshTasksIndirectCountEXT-OpImageWrite-04469", @@ -51025,8 +51401,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawClusterHUAWEI-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawClusterHUAWEI-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawClusterHUAWEI-OpImageWrite-08796", + "text": " If a VkImageView created with the 
format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawClusterHUAWEI-OpImageWrite-04469", @@ -52281,8 +52661,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDrawClusterIndirectHUAWEI-OpImageWrite-04469", @@ -53677,8 +54061,12 @@ "text": " All elements of pOffsets must be less than the size of the corresponding element in pBuffers" }, { - "vuid": "VUID-vkCmdBindVertexBuffers2-pSizes-03358", - "text": " If pSizes is not NULL, all elements of pOffsets plus pSizes must be less than or equal to the size of the corresponding element in pBuffers" + "vuid": "VUID-vkCmdBindVertexBuffers2-pSizes-08769", + "text": " If pSizes is not NULL, all elements of pSizes that are not VK_WHOLE_SIZE must have the corresponding elements in pOffsets plus pSizes be less than or equal to the size of the corresponding element in pBuffers" + }, + { + "vuid": "VUID-vkCmdBindVertexBuffers2-pSizes-08770", + "text": " If pSizes is not NULL, all elements of pSizes that are VK_WHOLE_SIZE must have the corresponding element in pOffsets be less than the size of the corresponding element in pBuffers" }, { "vuid": "VUID-vkCmdBindVertexBuffers2-pBuffers-03359", @@ -57477,8 +57865,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDispatch-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDispatch-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDispatch-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDispatch-OpImageWrite-04469", @@ -57749,8 +58141,12 @@ "text": " If a VkImageView is accessed as a result of this command, then 
the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDispatchIndirect-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDispatchIndirect-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDispatchIndirect-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDispatchIndirect-OpImageWrite-04469", @@ -58041,8 +58437,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdDispatchBase-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdDispatchBase-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDispatchBase-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdDispatchBase-OpImageWrite-04469", @@ -58329,8 +58729,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdSubpassShadingHUAWEI-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdSubpassShadingHUAWEI-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdSubpassShadingHUAWEI-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdSubpassShadingHUAWEI-OpImageWrite-04469", @@ -59005,8 +59409,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": 
"VUID-vkCmdExecuteGeneratedCommandsNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-OpImageWrite-04469", @@ -66981,8 +67389,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdTraceRaysNV-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdTraceRaysNV-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdTraceRaysNV-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdTraceRaysNV-OpImageWrite-04469", @@ -67345,8 +67757,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdTraceRaysKHR-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdTraceRaysKHR-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdTraceRaysKHR-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdTraceRaysKHR-OpImageWrite-04469", @@ -67825,8 +68241,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdTraceRaysIndirectKHR-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this 
command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirectKHR-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdTraceRaysIndirectKHR-OpImageWrite-04469", @@ -68253,8 +68673,12 @@ "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" }, { - "vuid": "VUID-vkCmdTraceRaysIndirect2KHR-None-04115", - "text": " If a VkImageView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + "vuid": "VUID-vkCmdTraceRaysIndirect2KHR-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdTraceRaysIndirect2KHR-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" }, { "vuid": "VUID-vkCmdTraceRaysIndirect2KHR-OpImageWrite-04469", @@ -71110,6 +71534,1306 @@ } ] }, + "vkCreateExecutionGraphPipelinesAMDX": { + "core": [ + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-shaderEnqueue-09124", + "text": " The shaderEnqueue feature must be enabled" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-flags-09125", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and the basePipelineIndex member of that same element is not -1, basePipelineIndex must be less than the index into pCreateInfos that corresponds to that element" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-flags-09126", + "text": " If the flags member of any element of pCreateInfos contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, the base pipeline must have been created with the VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT flag set" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-pipelineCache-09127", + "text": " If pipelineCache was created with VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, host access to pipelineCache must be externally synchronized" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-pipelineCache-parameter", + "text": " If pipelineCache is not VK_NULL_HANDLE, pipelineCache must be a valid VkPipelineCache handle" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-pCreateInfos-parameter", + "text": " pCreateInfos must be a valid pointer to an 
array of createInfoCount valid VkExecutionGraphPipelineCreateInfoAMDX structures" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-pAllocator-parameter", + "text": " If pAllocator is not NULL, pAllocator must be a valid pointer to a valid VkAllocationCallbacks structure" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-pPipelines-parameter", + "text": " pPipelines must be a valid pointer to an array of createInfoCount VkPipeline handles" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-createInfoCount-arraylength", + "text": " createInfoCount must be greater than 0" + }, + { + "vuid": "VUID-vkCreateExecutionGraphPipelinesAMDX-pipelineCache-parent", + "text": " If pipelineCache is a valid handle, it must have been created, allocated, or retrieved from device" + } + ] + }, + "VkExecutionGraphPipelineCreateInfoAMDX": { + "core": [ + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-07984", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineIndex is -1, basePipelineHandle must be a valid handle to a ray tracing VkPipeline" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-07985", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, and basePipelineHandle is VK_NULL_HANDLE, basePipelineIndex must be a valid index into the calling command’s pCreateInfos parameter" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-07986", + "text": " If flags contains the VK_PIPELINE_CREATE_DERIVATIVE_BIT flag, basePipelineIndex must be -1 or basePipelineHandle must be VK_NULL_HANDLE" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-layout-07987", + "text": " If a push constant block is declared in a shader, a push constant range in layout must match both the shader stage and range" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-layout-07988", + "text": " If a resource variables is declared in a shader, a descriptor slot in layout must match the shader stage" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-layout-07990", + "text": " If a resource variables is declared in a shader, and the descriptor type is not VK_DESCRIPTOR_TYPE_MUTABLE_EXT, a descriptor slot in layout must match the descriptor type" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-layout-07991", + "text": " If a resource variables is declared in a shader as an array, a descriptor slot in layout must match the descriptor count" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-03365", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-03366", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-03367", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-03368", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-03369", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-03370", + "text": " flags must not include 
VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-03576", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-04945", + "text": " flags must not include VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-09007", + "text": " If flags includes VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then the VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputePipelines feature must be enabled" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-09008", + "text": " If flags includes VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, then the pNext chain must include a pointer to a valid instance of VkComputePipelineIndirectBufferInfoNV specifying the address where the pipeline’s metadata will be saved" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pipelineCreationCacheControl-02875", + "text": " If the pipelineCreationCacheControl feature is not enabled, flags must not include VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT or VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-stage-09128", + "text": " The stage member of any element of pStages must be VK_SHADER_STAGE_COMPUTE_BIT" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pStages-09129", + "text": " The shader code for the entry point identified by each element of pStages and the rest of the state identified by this structure must adhere to the pipeline linking rules described in the Shader Interfaces chapter" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-layout-09130", + "text": " layout must be consistent with the layout of the shaders specified in pStages" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pLibraryInfo-09131", + "text": " If pLibraryInfo is not NULL, each element of its pLibraries member must have been created with a layout that is compatible with the layout in this pipeline" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-layout-09132", + "text": " The number of resources in layout accessible to each shader stage that is used by the pipeline must be less than or equal to VkPhysicalDeviceLimits::maxPerStageResources" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pLibraryInfo-09133", + "text": " If pLibraryInfo is not NULL, each element of pLibraryInfo->libraries must be either a compute pipeline or an execution graph pipeline" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-None-09134", + "text": " There must be no two nodes in the pipeline that share both the same shader name and index, as specified by VkPipelineShaderStageNodeCreateInfoAMDX" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-None-09135", + "text": " There must be no two nodes in the pipeline that share the same shader name and have input payload declarations with different sizes" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-None-09136", + "text": " There must be no two nodes in the pipeline that share the same name but have different execution models" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-CoalescedInputCountAMDX-09137", + "text": " There must be no two nodes in the pipeline that share the same name where one includes 
CoalescedInputCountAMDX and the other does not" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-StaticNumWorkgroupsAMDX-09138", + "text": " There must be no two nodes in the pipeline that share the same name where one includes StaticNumWorkgroupsAMDX and the other does not" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-PayloadNodeNameAMDX-09139", + "text": " If an output payload declared in any shader in the pipeline has a PayloadNodeNameAMDX decoration with a Node Name that matches the shader name of any other node in the graph, the size of the output payload must match the size of the input payload in the matching node" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pNext-pNext", + "text": " Each pNext member of any structure (including this one) in the pNext chain must be either NULL or a pointer to a valid instance of VkPipelineCompilerControlCreateInfoAMD or VkPipelineCreationFeedbackCreateInfo" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-flags-parameter", + "text": " flags must be a valid combination of VkPipelineCreateFlagBits values" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pStages-parameter", + "text": " If stageCount is not 0, and pStages is not NULL, pStages must be a valid pointer to an array of stageCount valid VkPipelineShaderStageCreateInfo structures" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-pLibraryInfo-parameter", + "text": " If pLibraryInfo is not NULL, pLibraryInfo must be a valid pointer to a valid VkPipelineLibraryCreateInfoKHR structure" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-layout-parameter", + "text": " layout must be a valid VkPipelineLayout handle" + }, + { + "vuid": "VUID-VkExecutionGraphPipelineCreateInfoAMDX-commonparent", + "text": " Both of basePipelineHandle, and layout that are valid handles of non-ignored parameters must have been created, allocated, or retrieved from the same VkDevice" + } + ] + }, + "VkPipelineShaderStageNodeCreateInfoAMDX": { + "core": [ + { + "vuid": "VUID-VkPipelineShaderStageNodeCreateInfoAMDX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX" + }, + { + "vuid": "VUID-VkPipelineShaderStageNodeCreateInfoAMDX-pName-parameter", + "text": " If pName is not NULL, pName must be a null-terminated UTF-8 string" + } + ] + }, + "vkGetExecutionGraphPipelineNodeIndexAMDX": { + "core": [ + { + "vuid": "VUID-vkGetExecutionGraphPipelineNodeIndexAMDX-pNodeInfo-09140", + "text": " pNodeInfo->pName must not be NULL" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineNodeIndexAMDX-pNodeInfo-09141", + "text": " pNodeInfo->index must not be VK_SHADER_INDEX_UNUSED_AMDX" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineNodeIndexAMDX-executionGraph-09142", + "text": " There must be a node in executionGraph with a shader name and index equal to pNodeInfo->pName and pNodeInfo->index" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineNodeIndexAMDX-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineNodeIndexAMDX-executionGraph-parameter", + "text": " executionGraph must be a 
valid VkPipeline handle" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineNodeIndexAMDX-pNodeInfo-parameter", + "text": " pNodeInfo must be a valid pointer to a valid VkPipelineShaderStageNodeCreateInfoAMDX structure" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineNodeIndexAMDX-pNodeIndex-parameter", + "text": " pNodeIndex must be a valid pointer to a uint32_t value" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineNodeIndexAMDX-executionGraph-parent", + "text": " executionGraph must have been created, allocated, or retrieved from device" + } + ] + }, + "vkGetExecutionGraphPipelineScratchSizeAMDX": { + "core": [ + { + "vuid": "VUID-vkGetExecutionGraphPipelineScratchSizeAMDX-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineScratchSizeAMDX-executionGraph-parameter", + "text": " executionGraph must be a valid VkPipeline handle" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineScratchSizeAMDX-pSizeInfo-parameter", + "text": " pSizeInfo must be a valid pointer to a VkExecutionGraphPipelineScratchSizeAMDX structure" + }, + { + "vuid": "VUID-vkGetExecutionGraphPipelineScratchSizeAMDX-executionGraph-parent", + "text": " executionGraph must have been created, allocated, or retrieved from device" + } + ] + }, + "VkExecutionGraphPipelineScratchSizeAMDX": { + "core": [ + { + "vuid": "VUID-VkExecutionGraphPipelineScratchSizeAMDX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX" + } + ] + }, + "vkCmdInitializeGraphScratchMemoryAMDX": { + "core": [ + { + "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-scratch-09143", + "text": " scratch must be the device address of an allocated memory range at least as large as the value of VkExecutionGraphPipelineScratchSizeAMDX::size returned by VkExecutionGraphPipelineScratchSizeAMDX for the currently bound execution graph pipeline." 
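For context, the valid usage statements in this hunk describe how execution graph scratch memory, node index queries, and graph dispatch are expected to fit together. The following is a minimal sketch, not part of the generated registry: it assumes the provisional VK_AMDX_shader_enqueue extension with the shaderEnqueue feature enabled and VK_ENABLE_BETA_EXTENSIONS defined so the AMDX types are visible; the handles device, cmd, and graphPipeline, the scratchAddr device address, the node name "main_node", and the payload layout are placeholders, and in practice the AMDX entry points would normally be resolved through vkGetDeviceProcAddr.

/* Illustrative sketch only; names marked as hypothetical are not from the patch. */
#define VK_ENABLE_BETA_EXTENSIONS
#include <vulkan/vulkan.h>
#include <assert.h>

void recordGraphDispatch(VkDevice device, VkCommandBuffer cmd,
                         VkPipeline graphPipeline, VkDeviceAddress scratchAddr)
{
    /* Query the scratch requirement; the backing buffer must be created with
     * VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX and cover at least
     * sizeInfo.size bytes starting at scratchAddr (VUIDs 09143, 09183, 09184). */
    VkExecutionGraphPipelineScratchSizeAMDX sizeInfo = {
        .sType = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX
    };
    vkGetExecutionGraphPipelineScratchSizeAMDX(device, graphPipeline, &sizeInfo);
    assert(scratchAddr % 64 == 0); /* scratch must be a multiple of 64 (VUID 09144) */

    /* Map a shader name/index pair to a node index (VUID 09142). */
    VkPipelineShaderStageNodeCreateInfoAMDX nodeInfo = {
        .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX,
        .pName = "main_node", /* hypothetical node name */
        .index = 0
    };
    uint32_t nodeIndex = 0;
    vkGetExecutionGraphPipelineNodeIndexAMDX(device, graphPipeline, &nodeInfo, &nodeIndex);

    /* Scratch initialization is defined against the currently bound execution
     * graph pipeline, so bind it before initializing. */
    vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX, graphPipeline);
    vkCmdInitializeGraphScratchMemoryAMDX(cmd, scratchAddr);

    /* One host-sourced dispatch record; the payload size must match the
     * node's input payload declaration (VUIDs 09145-09149). */
    static uint32_t payload[4] = { 1, 1, 1, 0 }; /* hypothetical payload contents */
    VkDispatchGraphInfoAMDX info = {
        .nodeIndex     = nodeIndex,
        .payloadCount  = 1,
        .payloads      = { .hostAddress = payload },
        .payloadStride = sizeof(payload)
    };
    VkDispatchGraphCountInfoAMDX countInfo = {
        .count  = 1,
        .infos  = { .hostAddress = &info },
        .stride = sizeof(info)
    };
    vkCmdDispatchGraphAMDX(cmd, scratchAddr, &countInfo);
}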
+ }, + { + "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-scratch-09144", + "text": " scratch must be a multiple of 64" + }, + { + "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-videocoding", + "text": " This command must only be called outside of a video coding scope" + }, + { + "vuid": "VUID-vkCmdInitializeGraphScratchMemoryAMDX-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" + } + ] + }, + "vkCmdDispatchGraphAMDX": { + "core": [ + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-06479", + "text": " If a VkImageView is sampled with depth comparison, the image view’s format features must contain VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-02691", + "text": " If a VkImageView is accessed using atomic operations as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-07888", + "text": " If a VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER descriptor is accessed using atomic operations as a result of this command, then the storage texel buffer’s format features must contain VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by 
vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpTypeImage-07027", + "text": " For any VkImageView being written as a storage image where the image format field of the OpTypeImage is Unknown, the view’s format features must contain VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpTypeImage-07028", + "text": " For any VkImageView being read as a storage image where the image format field of the OpTypeImage is Unknown, the view’s format features must contain VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpTypeImage-07029", + "text": " For any VkBufferView being written as a storage texel buffer where the image format field of the OpTypeImage is Unknown, the view’s buffer features must contain VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpTypeImage-07030", + "text": " Any VkBufferView being read as a storage texel buffer where the image format field of the OpTypeImage is Unknown then the view’s buffer features must contain VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08600", + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08601", + "text": " For each push constant that is statically used by a bound shader, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout or VkDescriptorSetLayout and VkPushConstantRange arrays used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-maintenance4-08602", + "text": " If the maintenance4 feature is not enabled, then for each push constant that is statically used by a bound shader, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout or VkDescriptorSetLayout and VkPushConstantRange arrays used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08114", + "text": " Descriptors in each bound descriptor set, specified via 
vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command and the bound VkPipeline was not created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08115", + "text": " If the descriptors used by the VkPipeline bound to the pipeline bind point were specified via vkCmdBindDescriptorSets, the bound VkPipeline must have been created without VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08116", + "text": " Descriptors in bound descriptor buffers, specified via vkCmdSetDescriptorBufferOffsetsEXT, must be valid if they are dynamically used by the VkPipeline bound to the pipeline bind point used by this command and the bound VkPipeline was created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08604", + "text": " Descriptors in bound descriptor buffers, specified via vkCmdSetDescriptorBufferOffsetsEXT, must be valid if they are dynamically used by any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08117", + "text": " If the descriptors used by the VkPipeline bound to the pipeline bind point were specified via vkCmdSetDescriptorBufferOffsetsEXT, the bound VkPipeline must have been created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08119", + "text": " If a descriptor is dynamically used with a VkPipeline created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, the descriptor memory must be resident" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08605", + "text": " If a descriptor is dynamically used with a VkShaderEXT created with a VkDescriptorSetLayout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, the descriptor memory must be resident" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08606", + "text": " If the shaderObject feature is not enabled, a valid pipeline must be bound to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08607", + "text": " If the shaderObject is enabled, either a valid pipeline must be bound to the pipeline bind point used by this command, or a valid combination of valid and VK_NULL_HANDLE shader objects must be bound to every supported shader stage corresponding to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08608", + "text": " If a pipeline is bound to the pipeline bind point used by this command, there must not have been any calls to dynamic state setting commands for any state not specified as dynamic in the VkPipeline object bound to the pipeline bind point used by this command, since that pipeline was bound" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08609", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command or any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08610", 
+ "text": " If the VkPipeline object bound to the pipeline bind point used by this command or any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08611", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command or any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-uniformBuffers-06935", + "text": " If any stage of the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, and that stage was created without enabling either VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT or VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT for uniformBuffers, and the robustBufferAccess feature is not enabled, that stage must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08612", + "text": " If the robustBufferAccess feature is not enabled, and any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-storageBuffers-06936", + "text": " If any stage of the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, and that stage was created without enabling either VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT or VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT for storageBuffers, and the robustBufferAccess feature is not enabled, that stage must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-08613", + "text": " If the robustBufferAccess feature is not enabled, and any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer and protectedNoFault is not supported, any resource accessed by bound shaders must not be a protected resource" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-06550", + "text": " If a bound shader accesses a VkSampler or VkImageView object that enables sampler {YCbCr} conversion, that object must only be used with OpImageSample* or OpImageSparseSample* instructions" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-ConstOffset-06551", + "text": " If a bound shader accesses a VkSampler or VkImageView object that enables sampler 
{YCbCr} conversion, that object must not use the ConstOffset and Offset operands" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-viewType-07752", + "text": " If a VkImageView is accessed as a result of this command, then the image view’s viewType must match the Dim operand of the OpTypeImage as described in Instruction/Sampler/Image View Validation" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-format-07753", + "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit component width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a component width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit component width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a component width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageWeightedSampleQCOM-06971", + "text": " If OpImageWeightedSampleQCOM is used to sample a VkImageView as a result of this command, then the image view’s format features must contain 
VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageWeightedSampleQCOM-06972", + "text": " If OpImageWeightedSampleQCOM uses a VkImageView as a sample weight image as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageBoxFilterQCOM-06973", + "text": " If OpImageBoxFilterQCOM is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageBlockMatchSSDQCOM-06974", + "text": " If OpImageBlockMatchSSDQCOM is used to read from an VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageBlockMatchSADQCOM-06975", + "text": " If OpImageBlockMatchSADQCOM is used to read from an VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageBlockMatchSADQCOM-06976", + "text": " If OpImageBlockMatchSADQCOM or OpImageBlockMatchSSDQCOM is used to read from a reference image as result of this command, then the specified reference coordinates must not fail integer texel coordinate validation" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageWeightedSampleQCOM-06977", + "text": " If OpImageWeightedSampleQCOM, OpImageBoxFilterQCOM, OpImageBlockMatchSSDQCOM, or OpImageBlockMatchSADQCOM uses a VkSampler as a result of this command, then the sampler must have been created with VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-OpImageWeightedSampleQCOM-06978", + "text": " If any command other than OpImageWeightedSampleQCOM, OpImageBoxFilterQCOM, OpImageBlockMatchSSDQCOM, or OpImageBlockMatchSADQCOM uses a VkSampler as a result of this command, then the sampler must not have been created with VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-None-07288", + "text": " Any shader invocation executed by this command must terminate" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-commandBuffer-09181", + "text": " commandBuffer must not be a protected command buffer" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-commandBuffer-09182", + "text": " commandBuffer must be a primary command buffer" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-scratch-09183", + "text": " scratch must be the device address of an allocated memory range at least as large as the value of VkExecutionGraphPipelineScratchSizeAMDX::size returned by VkExecutionGraphPipelineScratchSizeAMDX for the currently bound execution graph pipeline" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-scratch-09184", + "text": " scratch must be a device address within a VkBuffer created with the VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX or VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX flag" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-scratch-09185", + "text": " Device memory in the range [scratch,scratch
VkExecutionGraphPipelineScratchSizeAMDX::size) must have been initialized with vkCmdInitializeGraphScratchMemoryAMDX using the currently bound execution graph pipeline, and not modified after that by anything other than another execution graph dispatch command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-maxComputeWorkGroupCount-09186", + "text": " Execution of this command must not cause a node to be dispatched with a larger number of workgroups than that specified by either a MaxNumWorkgroupsAMDX decoration in the dispatched node or maxComputeWorkGroupCount" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-maxExecutionGraphShaderPayloadCount-09187", + "text": " Execution of this command must not cause any shader to initialize more than maxExecutionGraphShaderPayloadCount output payloads" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-NodeMaxPayloadsAMDX-09188", + "text": " Execution of this command must not cause any shader that declares NodeMaxPayloadsAMDX to initialize more output payloads than specified by the max number of payloads for that decoration. This requirement applies to each NodeMaxPayloadsAMDX decoration separately" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-pCountInfo-09145", + "text": " pCountInfo->infos must be a host pointer to a memory allocation at least as large as the product of count and stride" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-infos-09146", + "text": " Host memory locations at indexes in the range [infos, infos + (count*stride)), at a granularity of stride must contain valid VkDispatchGraphInfoAMDX structures in the first 24 bytes" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-pCountInfo-09147", + "text": " For each VkDispatchGraphInfoAMDX structure in pCountInfo->infos, payloads must be a host pointer to a memory allocation at least as large as the product of payloadCount and payloadStride" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-pCountInfo-09148", + "text": " For each VkDispatchGraphInfoAMDX structure in pCountInfo->infos, nodeIndex must be a valid node index in the currently bound execution graph pipeline, as returned by vkGetExecutionGraphPipelineNodeIndexAMDX" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-pCountInfo-09149", + "text": " For each VkDispatchGraphInfoAMDX structure in pCountInfo->infos, host memory locations at indexes in the range [payloads, payloads + (payloadCount * payloadStride)), at a granularity of payloadStride must contain a payload matching the size of the input payload expected by the node in nodeIndex in the first bytes" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-pCountInfo-parameter", + "text": " pCountInfo must be a valid pointer to a valid VkDispatchGraphCountInfoAMDX structure" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-videocoding", + "text": " This command must only be called outside of a video coding scope" + }, + { + "vuid": "VUID-vkCmdDispatchGraphAMDX-bufferlevel", + "text": " commandBuffer 
must be a primary VkCommandBuffer" + } + ] + }, + "vkCmdDispatchGraphIndirectAMDX": { + "core": [ + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-06479", + "text": " If a VkImageView is sampled with depth comparison, the image view’s format features must contain VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-02691", + "text": " If a VkImageView is accessed using atomic operations as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-07888", + "text": " If a VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER descriptor is accessed using atomic operations as a result of this command, then the storage texel buffer’s format features must contain VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpTypeImage-07027", + "text": " For any VkImageView being written as a 
storage image where the image format field of the OpTypeImage is Unknown, the view’s format features must contain VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpTypeImage-07028", + "text": " For any VkImageView being read as a storage image where the image format field of the OpTypeImage is Unknown, the view’s format features must contain VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpTypeImage-07029", + "text": " For any VkBufferView being written as a storage texel buffer where the image format field of the OpTypeImage is Unknown, the view’s buffer features must contain VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpTypeImage-07030", + "text": " Any VkBufferView being read as a storage texel buffer where the image format field of the OpTypeImage is Unknown then the view’s buffer features must contain VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08600", + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08601", + "text": " For each push constant that is statically used by a bound shader, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout or VkDescriptorSetLayout and VkPushConstantRange arrays used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-maintenance4-08602", + "text": " If the maintenance4 feature is not enabled, then for each push constant that is statically used by a bound shader, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout or VkDescriptorSetLayout and VkPushConstantRange arrays used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08114", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command and the bound VkPipeline was not created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08115", + "text": " If the descriptors used by the VkPipeline bound to the pipeline bind point were specified via vkCmdBindDescriptorSets, the bound VkPipeline must have been created without VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08116", + "text": " Descriptors in bound descriptor buffers, specified via vkCmdSetDescriptorBufferOffsetsEXT, must be valid if they are dynamically used by the VkPipeline bound to the pipeline bind point used by this command and the bound VkPipeline was created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": 
"VUID-vkCmdDispatchGraphIndirectAMDX-None-08604", + "text": " Descriptors in bound descriptor buffers, specified via vkCmdSetDescriptorBufferOffsetsEXT, must be valid if they are dynamically used by any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08117", + "text": " If the descriptors used by the VkPipeline bound to the pipeline bind point were specified via vkCmdSetDescriptorBufferOffsetsEXT, the bound VkPipeline must have been created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08119", + "text": " If a descriptor is dynamically used with a VkPipeline created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, the descriptor memory must be resident" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08605", + "text": " If a descriptor is dynamically used with a VkShaderEXT created with a VkDescriptorSetLayout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, the descriptor memory must be resident" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08606", + "text": " If the shaderObject feature is not enabled, a valid pipeline must be bound to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08607", + "text": " If the shaderObject is enabled, either a valid pipeline must be bound to the pipeline bind point used by this command, or a valid combination of valid and VK_NULL_HANDLE shader objects must be bound to every supported shader stage corresponding to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08608", + "text": " If a pipeline is bound to the pipeline bind point used by this command, there must not have been any calls to dynamic state setting commands for any state not specified as dynamic in the VkPipeline object bound to the pipeline bind point used by this command, since that pipeline was bound" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08609", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command or any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08610", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command or any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08611", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command or any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or 
OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-uniformBuffers-06935", + "text": " If any stage of the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, and that stage was created without enabling either VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT or VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT for uniformBuffers, and the robustBufferAccess feature is not enabled, that stage must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08612", + "text": " If the robustBufferAccess feature is not enabled, and any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-storageBuffers-06936", + "text": " If any stage of the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, and that stage was created without enabling either VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT or VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT for storageBuffers, and the robustBufferAccess feature is not enabled, that stage must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-08613", + "text": " If the robustBufferAccess feature is not enabled, and any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer and protectedNoFault is not supported, any resource accessed by bound shaders must not be a protected resource" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-06550", + "text": " If a bound shader accesses a VkSampler or VkImageView object that enables sampler {YCbCr} conversion, that object must only be used with OpImageSample* or OpImageSparseSample* instructions" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-ConstOffset-06551", + "text": " If a bound shader accesses a VkSampler or VkImageView object that enables sampler {YCbCr} conversion, that object must not use the ConstOffset and Offset operands" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-viewType-07752", + "text": " If a VkImageView is accessed as a result of this command, then the image view’s viewType must match the Dim operand of the OpTypeImage as described in Instruction/Sampler/Image View Validation" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-format-07753", + "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than 
VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit component width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a component width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit component width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a component width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-sparseImageInt64Atomics-04474", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageWeightedSampleQCOM-06971", + "text": " If OpImageWeightedSampleQCOM is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageWeightedSampleQCOM-06972", + "text": " If OpImageWeightedSampleQCOM uses a VkImageView as a sample weight image as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageBoxFilterQCOM-06973", + "text": " If OpImageBoxFilterQCOM is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM" + }, + { + "vuid": 
"VUID-vkCmdDispatchGraphIndirectAMDX-OpImageBlockMatchSSDQCOM-06974", + "text": " If OpImageBlockMatchSSDQCOM is used to read from an VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageBlockMatchSADQCOM-06975", + "text": " If OpImageBlockMatchSADQCOM is used to read from an VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageBlockMatchSADQCOM-06976", + "text": " If OpImageBlockMatchSADQCOM or OpImageBlockMatchSSDQCOM is used to read from a reference image as result of this command, then the specified reference coordinates must not fail integer texel coordinate validation" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageWeightedSampleQCOM-06977", + "text": " If OpImageWeightedSampleQCOM, OpImageBoxFilterQCOM, OpImageBlockMatchSSDQCOM, or OpImageBlockMatchSADQCOM uses a VkSampler as a result of this command, then the sampler must have been created with VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-OpImageWeightedSampleQCOM-06978", + "text": " If any command other than OpImageWeightedSampleQCOM, OpImageBoxFilterQCOM, OpImageBlockMatchSSDQCOM, or OpImageBlockMatchSADQCOM uses a VkSampler as a result of this command, then the sampler must not have been created with VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-None-07288", + "text": " Any shader invocation executed by this command must terminate" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-commandBuffer-09181", + "text": " commandBuffer must not be a protected command buffer" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-commandBuffer-09182", + "text": " commandBuffer must be a primary command buffer" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-scratch-09183", + "text": " scratch must be the device address of an allocated memory range at least as large as the value of VkExecutionGraphPipelineScratchSizeAMDX::size returned by VkExecutionGraphPipelineScratchSizeAMDX for the currently bound execution graph pipeline" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-scratch-09184", + "text": " scratch must be a device address within a VkBuffer created with the VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX or VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX flag" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-scratch-09185", + "text": " Device memory in the range [scratch,scratch
VkExecutionGraphPipelineScratchSizeAMDX::size) must have been initialized with vkCmdInitializeGraphScratchMemoryAMDX using the currently bound execution graph pipeline, and not modified after that by anything other than another execution graph dispatch command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-maxComputeWorkGroupCount-09186", + "text": " Execution of this command must not cause a node to be dispatched with a larger number of workgroups than that specified by either a MaxNumWorkgroupsAMDX decoration in the dispatched node or maxComputeWorkGroupCount" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-maxExecutionGraphShaderPayloadCount-09187", + "text": " Execution of this command must not cause any shader to initialize more than maxExecutionGraphShaderPayloadCount output payloads" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-NodeMaxPayloadsAMDX-09188", + "text": " Execution of this command must not cause any shader that declares NodeMaxPayloadsAMDX to initialize more output payloads than specified by the max number of payloads for that decoration. This requirement applies to each NodeMaxPayloadsAMDX decoration separately" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-09150", + "text": " pCountInfo->infos must be a device pointer to a memory allocation at least as large as the product of count and stride when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-09151", + "text": " pCountInfo->infos must be a device address within a VkBuffer created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT flag" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-09152", + "text": " pCountInfo->infos must be a multiple of executionGraphDispatchAddressAlignment" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-infos-09153", + "text": " Device memory locations at indexes in the range [infos, infos + (count*stride)), at a granularity of stride must contain valid VkDispatchGraphInfoAMDX structures in the first 24 bytes when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-09154", + "text": " For each VkDispatchGraphInfoAMDX structure in pCountInfo->infos, payloads must be a device pointer to a memory allocation at least as large as the product of payloadCount and payloadStride when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-09155", + "text": " For each VkDispatchGraphInfoAMDX structure in pCountInfo->infos, payloads must be a device address within a VkBuffer created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT flag" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-09156", + "text": " For each VkDispatchGraphInfoAMDX structure in pCountInfo->infos, payloads must be a multiple of executionGraphDispatchAddressAlignment" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-09157", + "text": " For each VkDispatchGraphInfoAMDX structure in pCountInfo->infos, nodeIndex must be a valid node index in the currently bound execution graph pipeline, as returned by vkGetExecutionGraphPipelineNodeIndexAMDX when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-09158", + "text": " For each VkDispatchGraphInfoAMDX structure in pCountInfo->infos, device memory locations at indexes in the range [payloads, payloads + (payloadCount * payloadStride)), at a granularity of payloadStride must 
contain a payload matching the size of the input payload expected by the node in nodeIndex in the first bytes when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-pCountInfo-parameter", + "text": " pCountInfo must be a valid pointer to a valid VkDispatchGraphCountInfoAMDX structure" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-videocoding", + "text": " This command must only be called outside of a video coding scope" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectAMDX-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" + } + ] + }, + "vkCmdDispatchGraphIndirectCountAMDX": { + "core": [ + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-magFilter-04553", + "text": " If a VkSampler created with magFilter or minFilter equal to VK_FILTER_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-mipmapMode-04770", + "text": " If a VkSampler created with mipmapMode equal to VK_SAMPLER_MIPMAP_MODE_LINEAR and compareEnable equal to VK_FALSE is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-06479", + "text": " If a VkImageView is sampled with depth comparison, the image view’s format features must contain VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-02691", + "text": " If a VkImageView is accessed using atomic operations as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-07888", + "text": " If a VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER descriptor is accessed using atomic operations as a result of this command, then the storage texel buffer’s format features must contain VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-02692", + "text": " If a VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-02693", + "text": " If the VK_EXT_filter_cubic extension is not enabled and any VkImageView is sampled with VK_FILTER_CUBIC_EXT as a result of this command, it must not have a VkImageViewType of VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY" + }, + { + "vuid": 
"VUID-vkCmdDispatchGraphIndirectCountAMDX-filterCubic-02694", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT as a result of this command must have a VkImageViewType and format that supports cubic filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubic returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-filterCubicMinmax-02695", + "text": " Any VkImageView being sampled with VK_FILTER_CUBIC_EXT with a reduction mode of either VK_SAMPLER_REDUCTION_MODE_MIN or VK_SAMPLER_REDUCTION_MODE_MAX as a result of this command must have a VkImageViewType and format that supports cubic filtering together with minmax filtering, as specified by VkFilterCubicImageViewImageFormatPropertiesEXT::filterCubicMinmax returned by vkGetPhysicalDeviceImageFormatProperties2" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-flags-02696", + "text": " Any VkImage created with a VkImageCreateInfo::flags containing VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV sampled as a result of this command must only be sampled using a VkSamplerAddressMode of VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpTypeImage-07027", + "text": " For any VkImageView being written as a storage image where the image format field of the OpTypeImage is Unknown, the view’s format features must contain VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpTypeImage-07028", + "text": " For any VkImageView being read as a storage image where the image format field of the OpTypeImage is Unknown, the view’s format features must contain VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpTypeImage-07029", + "text": " For any VkBufferView being written as a storage texel buffer where the image format field of the OpTypeImage is Unknown, the view’s buffer features must contain VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpTypeImage-07030", + "text": " Any VkBufferView being read as a storage texel buffer where the image format field of the OpTypeImage is Unknown then the view’s buffer features must contain VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08600", + "text": " For each set n that is statically used by a bound shader, a descriptor set must have been bound to n at the same pipeline bind point, with a VkPipelineLayout that is compatible for set n, with the VkPipelineLayout or VkDescriptorSetLayout array that was used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08601", + "text": " For each push constant that is statically used by a bound shader, a push constant value must have been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout or VkDescriptorSetLayout and VkPushConstantRange arrays used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-maintenance4-08602", + "text": " If the maintenance4 feature is not enabled, then for each push constant that is statically used by a bound shader, a push constant value must have 
been set for the same pipeline bind point, with a VkPipelineLayout that is compatible for push constants, with the VkPipelineLayout or VkDescriptorSetLayout and VkPushConstantRange arrays used to create the current VkPipeline or VkShaderEXT, as described in Pipeline Layout Compatibility" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08114", + "text": " Descriptors in each bound descriptor set, specified via vkCmdBindDescriptorSets, must be valid if they are statically used by the VkPipeline bound to the pipeline bind point used by this command and the bound VkPipeline was not created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08115", + "text": " If the descriptors used by the VkPipeline bound to the pipeline bind point were specified via vkCmdBindDescriptorSets, the bound VkPipeline must have been created without VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08116", + "text": " Descriptors in bound descriptor buffers, specified via vkCmdSetDescriptorBufferOffsetsEXT, must be valid if they are dynamically used by the VkPipeline bound to the pipeline bind point used by this command and the bound VkPipeline was created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08604", + "text": " Descriptors in bound descriptor buffers, specified via vkCmdSetDescriptorBufferOffsetsEXT, must be valid if they are dynamically used by any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08117", + "text": " If the descriptors used by the VkPipeline bound to the pipeline bind point were specified via vkCmdSetDescriptorBufferOffsetsEXT, the bound VkPipeline must have been created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08119", + "text": " If a descriptor is dynamically used with a VkPipeline created with VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, the descriptor memory must be resident" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08605", + "text": " If a descriptor is dynamically used with a VkShaderEXT created with a VkDescriptorSetLayout that was created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, the descriptor memory must be resident" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08606", + "text": " If the shaderObject feature is not enabled, a valid pipeline must be bound to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08607", + "text": " If the shaderObject is enabled, either a valid pipeline must be bound to the pipeline bind point used by this command, or a valid combination of valid and VK_NULL_HANDLE shader objects must be bound to every supported shader stage corresponding to the pipeline bind point used by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08608", + "text": " If a pipeline is bound to the pipeline bind point used by this command, there must not have been any calls to dynamic state setting commands for any state not specified as dynamic in the VkPipeline object bound to the pipeline bind point used by this command, since that pipeline was bound" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08609", 
+ "text": " If the VkPipeline object bound to the pipeline bind point used by this command or any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used to sample from any VkImage with a VkImageView of the type VK_IMAGE_VIEW_TYPE_3D, VK_IMAGE_VIEW_TYPE_CUBE, VK_IMAGE_VIEW_TYPE_1D_ARRAY, VK_IMAGE_VIEW_TYPE_2D_ARRAY or VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08610", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command or any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions with ImplicitLod, Dref or Proj in their name, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08611", + "text": " If the VkPipeline object bound to the pipeline bind point used by this command or any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a VkSampler object that uses unnormalized coordinates, that sampler must not be used with any of the SPIR-V OpImageSample* or OpImageSparseSample* instructions that includes a LOD bias or any offset values, in any shader stage" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-uniformBuffers-06935", + "text": " If any stage of the VkPipeline object bound to the pipeline bind point used by this command accesses a uniform buffer, and that stage was created without enabling either VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT or VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT for uniformBuffers, and the robustBufferAccess feature is not enabled, that stage must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08612", + "text": " If the robustBufferAccess feature is not enabled, and any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a uniform buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-storageBuffers-06936", + "text": " If any stage of the VkPipeline object bound to the pipeline bind point used by this command accesses a storage buffer, and that stage was created without enabling either VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT or VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT for storageBuffers, and the robustBufferAccess feature is not enabled, that stage must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-08613", + "text": " If the robustBufferAccess feature is not enabled, and any VkShaderEXT bound to a stage corresponding to the pipeline bind point used by this command accesses a storage buffer, it must not access values outside of the range of the buffer as specified in the descriptor set bound to the same pipeline bind point" + }, + { + "vuid": 
"VUID-vkCmdDispatchGraphIndirectCountAMDX-commandBuffer-02707", + "text": " If commandBuffer is an unprotected command buffer and protectedNoFault is not supported, any resource accessed by bound shaders must not be a protected resource" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-06550", + "text": " If a bound shader accesses a VkSampler or VkImageView object that enables sampler {YCbCr} conversion, that object must only be used with OpImageSample* or OpImageSparseSample* instructions" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-ConstOffset-06551", + "text": " If a bound shader accesses a VkSampler or VkImageView object that enables sampler {YCbCr} conversion, that object must not use the ConstOffset and Offset operands" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-viewType-07752", + "text": " If a VkImageView is accessed as a result of this command, then the image view’s viewType must match the Dim operand of the OpTypeImage as described in Instruction/Sampler/Image View Validation" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-format-07753", + "text": " If a VkImageView is accessed as a result of this command, then the numeric type of the image view’s format and the Sampled Type operand of the OpTypeImage must match" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageWrite-08795", + "text": " If a VkImageView created with a format other than VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the image view’s format" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageWrite-08796", + "text": " If a VkImageView created with the format VK_FORMAT_A8_UNORM_KHR is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have four components" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageWrite-04469", + "text": " If a VkBufferView is accessed using OpImageWrite as a result of this command, then the Type of the Texel operand of that instruction must have at least as many components as the buffer view’s format" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-SampledType-04470", + "text": " If a VkImageView with a VkFormat that has a 64-bit component width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-SampledType-04471", + "text": " If a VkImageView with a VkFormat that has a component width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-SampledType-04472", + "text": " If a VkBufferView with a VkFormat that has a 64-bit component width is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 64" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-SampledType-04473", + "text": " If a VkBufferView with a VkFormat that has a component width less than 64-bit is accessed as a result of this command, the SampledType of the OpTypeImage operand of that instruction must have a Width of 32" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-sparseImageInt64Atomics-04474", + "text": " If the 
sparseImageInt64Atomics feature is not enabled, VkImage objects created with the VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-sparseImageInt64Atomics-04475", + "text": " If the sparseImageInt64Atomics feature is not enabled, VkBuffer objects created with the VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT flag must not be accessed by atomic instructions through an OpTypeImage with a SampledType with a Width of 64 by this command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageWeightedSampleQCOM-06971", + "text": " If OpImageWeightedSampleQCOM is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageWeightedSampleQCOM-06972", + "text": " If OpImageWeightedSampleQCOM uses a VkImageView as a sample weight image as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageBoxFilterQCOM-06973", + "text": " If OpImageBoxFilterQCOM is used to sample a VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageBlockMatchSSDQCOM-06974", + "text": " If OpImageBlockMatchSSDQCOM is used to read from an VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageBlockMatchSADQCOM-06975", + "text": " If OpImageBlockMatchSADQCOM is used to read from an VkImageView as a result of this command, then the image view’s format features must contain VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageBlockMatchSADQCOM-06976", + "text": " If OpImageBlockMatchSADQCOM or OpImageBlockMatchSSDQCOM is used to read from a reference image as result of this command, then the specified reference coordinates must not fail integer texel coordinate validation" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageWeightedSampleQCOM-06977", + "text": " If OpImageWeightedSampleQCOM, OpImageBoxFilterQCOM, OpImageBlockMatchSSDQCOM, or OpImageBlockMatchSADQCOM uses a VkSampler as a result of this command, then the sampler must have been created with VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-OpImageWeightedSampleQCOM-06978", + "text": " If any command other than OpImageWeightedSampleQCOM, OpImageBoxFilterQCOM, OpImageBlockMatchSSDQCOM, or OpImageBlockMatchSADQCOM uses a VkSampler as a result of this command, then the sampler must not have been created with VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-None-07288", + "text": " Any shader invocation executed by this command must terminate" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-commandBuffer-09181", + "text": " commandBuffer must not be a protected command buffer" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-commandBuffer-09182", + "text": " commandBuffer must be a primary 
command buffer" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-scratch-09183", + "text": " scratch must be the device address of an allocated memory range at least as large as the value of VkExecutionGraphPipelineScratchSizeAMDX::size returned by VkExecutionGraphPipelineScratchSizeAMDX for the currently bound execution graph pipeline" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-scratch-09184", + "text": " scratch must be a device address within a VkBuffer created with the VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX or VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX flag" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-scratch-09185", + "text": " Device memory in the range [scratch,scratch
VkExecutionGraphPipelineScratchSizeAMDX::size) must have been initialized with vkCmdInitializeGraphScratchMemoryAMDX using the currently bound execution graph pipeline, and not modified after that by anything other than another execution graph dispatch command" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-maxComputeWorkGroupCount-09186", + "text": " Execution of this command must not cause a node to be dispatched with a larger number of workgroups than that specified by either a MaxNumWorkgroupsAMDX decoration in the dispatched node or maxComputeWorkGroupCount" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-maxExecutionGraphShaderPayloadCount-09187", + "text": " Execution of this command must not cause any shader to initialize more than maxExecutionGraphShaderPayloadCount output payloads" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-NodeMaxPayloadsAMDX-09188", + "text": " Execution of this command must not cause any shader that declares NodeMaxPayloadsAMDX to initialize more output payloads than specified by the max number of payloads for that decoration. This requirement applies to each NodeMaxPayloadsAMDX decoration separately" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09159", + "text": " countInfo must be a device pointer to a memory allocation containing a valid VkDispatchGraphCountInfoAMDX structure when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09160", + "text": " countInfo must be a device address within a VkBuffer created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT flag" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09161", + "text": " countInfo must be a multiple of executionGraphDispatchAddressAlignment" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09162", + "text": " countInfo->infos must be a device pointer to a memory allocation at least as large as the product of count and stride when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09163", + "text": " countInfo->infos must be a device address within a VkBuffer created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT flag" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09164", + "text": " countInfo->infos must be a multiple of executionGraphDispatchAddressAlignment" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-infos-09165", + "text": " Device memory locations at indexes in the range [infos, infos + (count*stride)), at a granularity of stride must contain valid VkDispatchGraphInfoAMDX structures in the first 24 bytes when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09166", + "text": " For each VkDispatchGraphInfoAMDX structure in countInfo->infos, payloads must be a device pointer to a memory allocation at least as large as the product of payloadCount and payloadStride when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09167", + "text": " For each VkDispatchGraphInfoAMDX structure in countInfo->infos, payloads must be a device address within a VkBuffer created with the VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT flag" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09168", + "text": " For each VkDispatchGraphInfoAMDX structure in countInfo->infos, payloads must be a multiple of 
executionGraphDispatchAddressAlignment" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09169", + "text": " For each VkDispatchGraphInfoAMDX structure in countInfo->infos, nodeIndex must be a valid node index in the currently bound execution graph pipeline, as returned by vkGetExecutionGraphPipelineNodeIndexAMDX when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-countInfo-09170", + "text": " For each VkDispatchGraphInfoAMDX structure in countInfo->infos, device memory locations at indexes in the range [payloads, payloads + (payloadCount * payloadStride)), at a granularity of payloadStride must contain a payload matching the size of the input payload expected by the node in nodeIndex in the first bytes when this command is executed on the device" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-commandBuffer-parameter", + "text": " commandBuffer must be a valid VkCommandBuffer handle" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-commandBuffer-recording", + "text": " commandBuffer must be in the recording state" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-commandBuffer-cmdpool", + "text": " The VkCommandPool that commandBuffer was allocated from must support graphics, or compute operations" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-renderpass", + "text": " This command must only be called outside of a render pass instance" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-videocoding", + "text": " This command must only be called outside of a video coding scope" + }, + { + "vuid": "VUID-vkCmdDispatchGraphIndirectCountAMDX-bufferlevel", + "text": " commandBuffer must be a primary VkCommandBuffer" + } + ] + }, + "VkDispatchGraphInfoAMDX": { + "core": [ + { + "vuid": "VUID-VkDispatchGraphInfoAMDX-payloadCount-09171", + "text": " payloadCount must be no greater than maxExecutionGraphShaderPayloadCount" + } + ] + }, "vkEnumerateInstanceLayerProperties": { "core": [ { @@ -72210,6 +73934,14 @@ } ] }, + "VkPhysicalDeviceMaintenance5FeaturesKHR": { + "core": [ + { + "vuid": "VUID-VkPhysicalDeviceMaintenance5FeaturesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR" + } + ] + }, "VkPhysicalDeviceDynamicRenderingFeatures": { "core": [ { @@ -72490,6 +74222,14 @@ } ] }, + "VkPhysicalDeviceShaderEnqueueFeaturesAMDX": { + "core": [ + { + "vuid": "VUID-VkPhysicalDeviceShaderEnqueueFeaturesAMDX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX" + } + ] + }, "VkPhysicalDevicePushDescriptorPropertiesKHR": { "core": [ { @@ -72618,6 +74358,14 @@ } ] }, + "VkPhysicalDeviceMaintenance5PropertiesKHR": { + "core": [ + { + "vuid": "VUID-VkPhysicalDeviceMaintenance5PropertiesKHR-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR" + } + ] + }, "VkPhysicalDeviceMeshShaderPropertiesNV": { "core": [ { @@ -73030,6 +74778,14 @@ } ] }, + "VkPhysicalDeviceShaderEnqueuePropertiesAMDX": { + "core": [ + { + "vuid": "VUID-VkPhysicalDeviceShaderEnqueuePropertiesAMDX-sType-sType", + "text": " sType must be VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX" + } + ] + }, "vkGetPhysicalDeviceMultisamplePropertiesEXT": { "core": [ { @@ -73438,7 +75194,11 @@ }, { "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-pNext-pNext", - "text": " pNext must be NULL" + "text": " pNext must be NULL or a pointer to a valid 
instance of VkBufferUsageFlags2CreateInfoKHR" + }, + { + "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-sType-unique", + "text": " The sType value of each struct in the pNext chain must be unique" }, { "vuid": "VUID-VkPhysicalDeviceExternalBufferInfo-flags-parameter", @@ -74762,7 +76522,7 @@ }, { "vuid": "VUID-StandaloneSpirv-OpTypeRuntimeArray-04680", - "text": " OpTypeRuntimeArray must only be used for the last member of a Block-decorated OpTypeStruct in StorageBuffer or PhysicalStorageBuffer {StorageClass}; BufferBlock-decorated OpTypeStruct in Uniform {StorageClass}; the outermost dimension of an arrayed variable in the StorageBuffer, Uniform, or UniformConstant {StorageClass}" + "text": " OpTypeRuntimeArray must only be used for:
  • the last member of a Block-decorated OpTypeStruct in StorageBuffer or PhysicalStorageBuffer storage {StorageClass}
  • BufferBlock-decorated OpTypeStruct in the Uniform storage {StorageClass}
  • the outermost dimension of an arrayed variable in the StorageBuffer, Uniform, or UniformConstant storage {StorageClass}
  • variables in the NodePayloadAMDX storage {StorageClass} when the CoalescingAMDX Execution Mode is specified
" }, { "vuid": "VUID-StandaloneSpirv-Function-04681", @@ -74989,8 +76749,12 @@ "text": " In mesh shaders using the MeshEXT {ExecutionModel} the OutputPrimitivesEXT {ExecutionMode} must be greater than 0" }, { - "vuid": "VUID-StandaloneSpirv-MeshEXT-07728", - "text": " In mesh shaders using the MeshEXT or MeshNV {ExecutionModel} and the OutputPoints {ExecutionMode}, if the number of output points is greater than 0, a PointSize decorated variable must be written to for each output point" + "vuid": "VUID-StandaloneSpirv-maintenance5-09189", + "text": " In mesh shaders using the MeshEXT or MeshNV {ExecutionModel} and the OutputPoints {ExecutionMode}, if the number of output points is greater than 0, a PointSize decorated variable must be written to for each output point if maintenance5 is not enabled" + }, + { + "vuid": "VUID-StandaloneSpirv-maintenance5-09190", + "text": " If maintenance5 is enabled and a PointSize decorated variable is written to, all execution paths must write to a PointSize decorated variable" }, { "vuid": "VUID-StandaloneSpirv-Input-07290", @@ -75831,6 +77595,34 @@ { "vuid": "VUID-RuntimeSpirv-minSampleShading-08732", "text": " If sample shading is enabled and any of the OpColorAttachmentReadEXT, OpDepthAttachmentReadEXT, or OpStencilAttachmentReadEXT operations are used, then minSampleShading must be 1.0" + }, + { + "vuid": "VUID-RuntimeSpirv-ShaderEnqueueAMDX-09191", + "text": " The ShaderEnqueueAMDX capability must only be used in shaders with the GLCompute execution model" + }, + { + "vuid": "VUID-RuntimeSpirv-NodePayloadAMDX-09192", + "text": " Variables in the NodePayloadAMDX storage class must only be declared in the GLCompute execution model" + }, + { + "vuid": "VUID-RuntimeSpirv-maxExecutionGraphShaderPayloadSize-09193", + "text": " Variables declared in the NodePayloadAMDX storage class must not be larger than the maxExecutionGraphShaderPayloadSize limit" + }, + { + "vuid": "VUID-RuntimeSpirv-maxExecutionGraphShaderPayloadSize-09194", + "text": " Variables declared in the NodeOutputPayloadAMDX storage class must not be larger than the maxExecutionGraphShaderPayloadSize limit" + }, + { + "vuid": "VUID-RuntimeSpirv-maxExecutionGraphShaderPayloadSize-09195", + "text": " For a given entry point, the sum of the size of any variable in the NodePayloadAMDX storage class, and the combined size of all statically initialized variables in the NodeOutputPayloadAMDX storage class must not be greater than maxExecutionGraphShaderPayloadSize" + }, + { + "vuid": "VUID-RuntimeSpirv-maxExecutionGraphShaderPayloadCount-09196", + "text": " Shaders must not statically initialize more than maxExecutionGraphShaderPayloadCount variables in the NodeOutputPayloadAMDX storage class" + }, + { + "vuid": "VUID-RuntimeSpirv-maxExecutionGraphShaderOutputNodes-09197", + "text": " Shaders must not include more than maxExecutionGraphShaderOutputNodes instances of OpInitializeNodePayloadsAMDX" } ] }, diff --git a/registry/vk.xml b/registry/vk.xml index 5488ca7..898bd9f 100644 --- a/registry/vk.xml +++ b/registry/vk.xml @@ -175,7 +175,7 @@ branch of the member gitlab server. #define VKSC_API_VERSION_1_0 VK_MAKE_API_VERSION(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0
// Version of this file -#define VK_HEADER_VERSION 259 +#define VK_HEADER_VERSION 260
// Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) // Version of this file @@ -390,6 +390,8 @@ typedef void* MTLSharedEvent_id; typedef VkFlags VkBuildMicromapFlagsEXT; typedef VkFlags VkMicromapCreateFlagsEXT; typedef VkFlags VkDirectDriverLoadingFlagsLUNARG; + typedef VkFlags64 VkPipelineCreateFlags2KHR; + typedef VkFlags64 VkBufferUsageFlags2KHR; WSI extensions typedef VkFlags VkCompositeAlphaFlagsKHR; @@ -776,6 +778,8 @@ typedef void* MTLSharedEvent_id; + + @@ -1183,6 +1187,11 @@ typedef void* MTLSharedEvent_id; uint32_t dstArrayElementArray element within the destination binding to copy to uint32_t descriptorCountNumber of descriptors to write (determines the size of the array pointed by pDescriptors) + + VkStructureType sType + const void* pNext + VkBufferUsageFlags2KHR usage + VkStructureType sType const void* pNext @@ -1450,6 +1459,11 @@ typedef void* MTLSharedEvent_id; VkDeviceSize size VkDeviceAddress pipelineDeviceAddressCaptureReplay + + VkStructureType sType + const void* pNext + VkPipelineCreateFlags2KHR flags + uint32_t bindingVertex buffer binding id uint32_t strideDistance between vertices in bytes (0 = no advancement) @@ -3665,6 +3679,30 @@ typedef void* MTLSharedEvent_id; VkDeviceSize maxBufferSize + + VkStructureType sType + void* pNext + VkBool32 maintenance5 + + + VkStructureType sType + void* pNext + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting + VkBool32 depthStencilSwizzleOneSupport + VkBool32 polygonModePointSize + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram + VkBool32 nonStrictWideLinesUseParallelogram + + + VkStructureType sType + const void* pNext + uint32_t viewMask + uint32_t colorAttachmentCount + const VkFormat* pColorAttachmentFormats + VkFormat depthAttachmentFormat + VkFormat stencilAttachmentFormat + VkStructureType sType void* pNext @@ -4972,7 +5010,7 @@ typedef void* MTLSharedEvent_id; uint64_t duration - + VkStructureType sType const void* pNext VkPipelineCreationFeedback* pPipelineCreationFeedbackOutput pipeline creation feedback. 
@@ -5556,7 +5594,7 @@ typedef void* MTLSharedEvent_id; VkBool32 uniformTexelBufferOffsetSingleTexelAlignment VkDeviceSize maxBufferSize - + VkStructureType sType const void* pNext VkPipelineCompilerControlFlagsAMD compilerControlFlags @@ -5626,6 +5664,10 @@ typedef void* MTLSharedEvent_id; VkDeviceAddress deviceAddress const void* hostAddress + + VkDeviceAddress deviceAddress + const void* hostAddress + VkStructureType sType const void* pNext @@ -6389,7 +6431,7 @@ typedef void* MTLSharedEvent_id; VkImageLayout newLayout VkImageSubresourceRange subresourceRange - + VkStructureType sType void* pNext VkDeviceSize sizeSpecified in bytes @@ -7704,7 +7746,7 @@ typedef void* MTLSharedEvent_id; VkStructureType sType - void* pNext + const void* pNext VkGraphicsPipelineLibraryFlagsEXT flags @@ -7758,7 +7800,7 @@ typedef void* MTLSharedEvent_id; void* pNext VkBool32 imageCompressionControl - + VkStructureType sType void* pNext VkImageCompressionFlagsEXT imageCompressionFlags @@ -7769,16 +7811,18 @@ typedef void* MTLSharedEvent_id; void* pNext VkBool32 imageCompressionControlSwapchain - - VkStructureType sType + + VkStructureType sType void* pNext VkImageSubresource imageSubresource - - VkStructureType sType + + + VkStructureType sType void* pNext VkSubresourceLayout subresourceLayout + VkStructureType sType const void* pNext @@ -8360,6 +8404,12 @@ typedef void* MTLSharedEvent_id; void* pNext VkBool32 rayTracingPositionFetch + + VkStructureType sType + const void* pNext + const VkImageCreateInfo* pCreateInfo + const VkImageSubresource2KHR* pSubresource + VkStructureType sType void* pNext @@ -8496,6 +8546,53 @@ typedef void* MTLSharedEvent_id; void* pNext VkShaderStageFlags cooperativeMatrixSupportedStages + + VkStructureType sType + void* pNext + uint32_t maxExecutionGraphDepth + uint32_t maxExecutionGraphShaderOutputNodes + uint32_t maxExecutionGraphShaderPayloadSize + uint32_t maxExecutionGraphShaderPayloadCount + uint32_t executionGraphDispatchAddressAlignment + + + VkStructureType sType + void* pNext + VkBool32 shaderEnqueue + + + VkStructureType sType + const void* pNext + VkPipelineCreateFlags flags + uint32_t stageCount + const VkPipelineShaderStageCreateInfo* pStages + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo + VkPipelineLayout layout + VkPipeline basePipelineHandle + int32_t basePipelineIndex + + + VkStructureType sType + const void* pNext + const char* pName + uint32_t index + + + VkStructureType sType + void* pNext + VkDeviceSize size + + + uint32_t nodeIndex + uint32_t payloadCount + VkDeviceOrHostAddressConstAMDX payloads + uint64_t payloadStride + + + uint32_t count + VkDeviceOrHostAddressConstAMDX infos + uint64_t stride + @@ -8534,6 +8631,7 @@ typedef void* MTLSharedEvent_id; + @@ -9141,6 +9239,17 @@ typedef void* MTLSharedEvent_id; + + + + + + + + + + + @@ -9177,11 +9286,16 @@ typedef void* MTLSharedEvent_id; - + + + + + + @@ -11173,6 +11287,12 @@ typedef void* MTLSharedEvent_id; VkRenderPass renderPass VkExtent2D* pGranularity + + void vkGetRenderingAreaGranularityKHR + VkDevice device + const VkRenderingAreaInfoKHR* pRenderingAreaInfo + VkExtent2D* pGranularity + VkResult vkCreateCommandPool VkDevice device @@ -13517,6 +13637,14 @@ typedef void* MTLSharedEvent_id; const VkRect2D* pScissors + + void vkCmdBindIndexBuffer2KHR + VkCommandBuffer commandBuffer + VkBuffer buffer + VkDeviceSize offset + VkDeviceSize size + VkIndexType indexType + void vkCmdBindVertexBuffers2 VkCommandBuffer commandBuffer @@ -14369,12 +14497,13 @@ typedef void* MTLSharedEvent_id; 
VkShaderModuleIdentifierEXT* pIdentifier - void vkGetImageSubresourceLayout2EXT + void vkGetImageSubresourceLayout2KHR VkDevice device VkImage image - const VkImageSubresource2EXT* pSubresource - VkSubresourceLayout2EXT* pLayout + const VkImageSubresource2KHR* pSubresource + VkSubresourceLayout2KHR* pLayout + VkResult vkGetPipelinePropertiesEXT VkDevice device @@ -14449,6 +14578,12 @@ typedef void* MTLSharedEvent_id; VkDevice device const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo + + void vkGetDeviceImageSubresourceLayoutKHR + VkDevice device + const VkDeviceImageSubresourceInfoKHR* pInfo + VkSubresourceLayout2KHR* pLayout + VkResult vkMapMemory2KHR VkDevice device @@ -14500,6 +14635,51 @@ typedef void* MTLSharedEvent_id; uint32_t* pPropertyCount VkCooperativeMatrixPropertiesKHR* pProperties + + VkResult vkGetExecutionGraphPipelineScratchSizeAMDX + VkDevice device + VkPipeline executionGraph + VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo + + + VkResult vkGetExecutionGraphPipelineNodeIndexAMDX + VkDevice device + VkPipeline executionGraph + const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo + uint32_t* pNodeIndex + + + VkResult vkCreateExecutionGraphPipelinesAMDX + VkDevice device + VkPipelineCache pipelineCache + uint32_t createInfoCount + const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos + const VkAllocationCallbacks* pAllocator + VkPipeline* pPipelines + + + void vkCmdInitializeGraphScratchMemoryAMDX + VkCommandBuffer commandBuffer + VkDeviceAddress scratch + + + void vkCmdDispatchGraphAMDX + VkCommandBuffer commandBuffer + VkDeviceAddress scratch + const VkDispatchGraphCountInfoAMDX* pCountInfo + + + void vkCmdDispatchGraphIndirectAMDX + VkCommandBuffer commandBuffer + VkDeviceAddress scratch + const VkDispatchGraphCountInfoAMDX* pCountInfo + + + void vkCmdDispatchGraphIndirectCountAMDX + VkCommandBuffer commandBuffer + VkDeviceAddress scratch + VkDeviceAddress countInfo + @@ -16441,8 +16621,6 @@ typedef void* MTLSharedEvent_id; - - @@ -16454,6 +16632,10 @@ typedef void* MTLSharedEvent_id; + + + + @@ -17861,11 +18043,36 @@ typedef void* MTLSharedEvent_id; - + - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -18082,7 +18289,6 @@ typedef void* MTLSharedEvent_id; - @@ -18144,6 +18350,9 @@ typedef void* MTLSharedEvent_id; + + + @@ -18247,7 +18456,6 @@ typedef void* MTLSharedEvent_id; - @@ -18318,6 +18526,7 @@ typedef void* MTLSharedEvent_id; + @@ -18487,7 +18696,6 @@ typedef void* MTLSharedEvent_id; - @@ -18532,7 +18740,6 @@ typedef void* MTLSharedEvent_id; - @@ -18550,6 +18757,12 @@ typedef void* MTLSharedEvent_id; + + + + + + @@ -18616,6 +18829,9 @@ typedef void* MTLSharedEvent_id; + + + @@ -19339,7 +19555,7 @@ typedef void* MTLSharedEvent_id; - + @@ -19908,7 +20124,7 @@ typedef void* MTLSharedEvent_id; - + @@ -20734,9 +20950,9 @@ typedef void* MTLSharedEvent_id; - + - + @@ -21003,7 +21219,6 @@ typedef void* MTLSharedEvent_id; - @@ -21033,6 +21248,9 @@ typedef void* MTLSharedEvent_id; + + + @@ -21440,6 +21658,9 @@ typedef void* MTLSharedEvent_id; + + + @@ -21844,6 +22065,7 @@ typedef void* MTLSharedEvent_id; + @@ -21890,6 +22112,7 @@ typedef void* MTLSharedEvent_id; + @@ -21933,16 +22156,6 @@ typedef void* MTLSharedEvent_id; - - - - - - - - - - @@ -21968,15 +22181,39 @@ typedef void* MTLSharedEvent_id; + + + + + + + + + + + + + + + + + + + + + + + + @@ -22149,7 +22386,7 @@ typedef void* MTLSharedEvent_id; - + @@ -22158,10 +22395,132 @@ typedef void* MTLSharedEvent_id; - + - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -22300,16 +22659,6 @@ typedef void* MTLSharedEvent_id; - - - - - - - - - - @@ -22327,6 +22676,30 @@ typedef void* MTLSharedEvent_id; + + + + + + + + + + + + + + + + + + + + + + + + @@ -22344,6 +22717,8 @@ typedef void* MTLSharedEvent_id; + + @@ -22556,6 +22931,7 @@ typedef void* MTLSharedEvent_id; + @@ -22661,7 +23037,7 @@ typedef void* MTLSharedEvent_id; - + @@ -22797,6 +23173,12 @@ typedef void* MTLSharedEvent_id; + + + + + + @@ -22843,6 +23225,15 @@ typedef void* MTLSharedEvent_id; + + + + + + + + + @@ -24890,6 +25281,9 @@ typedef void* MTLSharedEvent_id; + + + From 6dcbe90f9bfe0d25cd29e92428f270f16ebbc1df Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Fri, 28 Jul 2023 23:44:19 -0700 Subject: [PATCH 15/17] Update CoC link --- CODE_OF_CONDUCT.adoc | 10 ++++++++++ CODE_OF_CONDUCT.md | 1 - 2 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 CODE_OF_CONDUCT.adoc delete mode 100644 CODE_OF_CONDUCT.md diff --git a/CODE_OF_CONDUCT.adoc b/CODE_OF_CONDUCT.adoc new file mode 100644 index 0000000..d923209 --- /dev/null +++ b/CODE_OF_CONDUCT.adoc @@ -0,0 +1,10 @@ +// Copyright 2018-2023 The Khronos Group Inc. +// SPDX-License-Identifier: CC-BY-4.0 + += Code of Conduct + +A reminder that this repository is managed by the Khronos Group. +Interactions here should follow the +https://www.khronos.org/about/code-of-conduct[Khronos Code of Conduct], +which prohibits aggressive or derogatory language. Please keep the +discussion friendly and civil. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index a11610b..0000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1 +0,0 @@ -A reminder that this issue tracker is managed by the Khronos Group. Interactions here should follow the Khronos Code of Conduct (https://www.khronos.org/developers/code-of-conduct), which prohibits aggressive or derogatory language. Please keep the discussion friendly and civil. From a3b683653e6a498514ef8a1865594810e91c594c Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Fri, 28 Jul 2023 23:49:14 -0700 Subject: [PATCH 16/17] Use MIT license --- CODE_OF_CONDUCT.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CODE_OF_CONDUCT.adoc b/CODE_OF_CONDUCT.adoc index d923209..bd635bb 100644 --- a/CODE_OF_CONDUCT.adoc +++ b/CODE_OF_CONDUCT.adoc @@ -1,5 +1,5 @@ // Copyright 2018-2023 The Khronos Group Inc. -// SPDX-License-Identifier: CC-BY-4.0 +// SPDX-License-Identifier: MIT = Code of Conduct From 450ead13e1064584da027d91192bd7bfb724640f Mon Sep 17 00:00:00 2001 From: Jon Leech Date: Fri, 4 Aug 2023 04:01:23 -0700 Subject: [PATCH 17/17] Update for Vulkan-Docs 1.3.261 --- include/vulkan/vulkan.hpp | 2 +- include/vulkan/vulkan_core.h | 46 ++++---- include/vulkan/vulkan_enums.hpp | 91 ++++++++-------- include/vulkan/vulkan_to_string.hpp | 130 +++++++++++------------ registry/validusage.json | 156 ++++++++++++++++++++++------ registry/vk.xml | 61 ++++++----- 6 files changed, 292 insertions(+), 194 deletions(-) diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp index 8f46fc8..0378930 100644 --- a/include/vulkan/vulkan.hpp +++ b/include/vulkan/vulkan.hpp @@ -114,7 +114,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 260, "Wrong VK_HEADER_VERSION!" 
); +static_assert( VK_HEADER_VERSION == 261, "Wrong VK_HEADER_VERSION!" ); // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. // To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h index 78b0da7..c574858 100644 --- a/include/vulkan/vulkan_core.h +++ b/include/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 260 +#define VK_HEADER_VERSION 261 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -10441,16 +10441,15 @@ typedef VkFlags64 VkPipelineCreateFlagBits2KHR; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RESERVED_BIT_28_NV = 0x10000000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_KHR = 0x00000020ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV = 0x00000020ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR = 0x00000040ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR = 0x00000100ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR = 0x00000200ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_KHR = 0x00000400ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_KHR = 0x00800000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR = 0x00000800ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000ULL; @@ -10459,16 +10458,17 @@ static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NU static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_KHR = 
0x00040000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_KHR = 0x00100000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV = 0x00040000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_KHR = 0x00400000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_KHR = 0x01000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR = 0x02000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR = 0x04000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_KHR = 0x08000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_KHR = 0x40000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_KHR = 0x20000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000ULL; typedef VkFlags64 VkBufferUsageFlags2KHR; @@ -10484,11 +10484,11 @@ static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_KHR = 0x00000200ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RAY_TRACING_BIT_KHR = 0x00000400ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_KHR = 0x00000800ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_KHR = 0x00001000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV = 0x00000400ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800ULL; +static const 
VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR = 0x00004000ULL; #ifdef VK_ENABLE_BETA_EXTENSIONS @@ -10500,11 +10500,11 @@ static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KH static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_KHR = 0x00200000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_KHR = 0x00400000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_KHR = 0x04000000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00800000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_KHR = 0x01000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT = 0x01000000ULL; typedef struct VkPhysicalDeviceMaintenance5FeaturesKHR { VkStructureType sType; diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp index effa298..bac9933 100644 --- a/include/vulkan/vulkan_enums.hpp +++ b/include/vulkan/vulkan_enums.hpp @@ -6638,21 +6638,18 @@ namespace VULKAN_HPP_NAMESPACE enum class PipelineCreateFlagBits2KHR : VkPipelineCreateFlags2KHR { - eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR, - eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR, - eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eReserved28NV = VK_PIPELINE_CREATE_2_RESERVED_BIT_28_NV, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR, + eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR, + eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR, eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR, - eDeferCompile = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_KHR, + eDeferCompileNV = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV, eCaptureStatistics = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR, eCaptureInternalRepresentations = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR, eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR, - eLinkTimeOptimization = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_KHR, - eRetainLinkTimeOptimizationInfo = 
VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_KHR, + eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT, + eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT, eLibrary = VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR, eRayTracingSkipTriangles = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, eRayTracingSkipAabbs = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR, @@ -6661,16 +6658,17 @@ namespace VULKAN_HPP_NAMESPACE eRayTracingNoNullMissShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, eRayTracingNoNullIntersectionShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, eRayTracingShaderGroupHandleCaptureReplay = VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, - eIndirectBindable = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_KHR, - eRayTracingAllowMotion = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV, + eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV, eRenderingFragmentShadingRateAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eRenderingFragmentDensityMapAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_KHR, - eRayTracingOpacityMicromap = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_KHR, - eColorAttachmentFeedbackLoop = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR, - eDepthStencilAttachmentFeedbackLoop = VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_KHR, - eNoProtectedAccess = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_KHR, - eProtectedAccessOnly = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_KHR, - eDescriptorBuffer = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_KHR + eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT, + eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eNoProtectedAccessEXT = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT, + eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT, + eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV, + eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT }; using PipelineCreateFlags2KHR = Flags; @@ -6680,22 +6678,20 @@ namespace VULKAN_HPP_NAMESPACE { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2KHR allFlags = - PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | PipelineCreateFlagBits2KHR::eReserved28NV -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompile | + PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative | + PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2KHR::eDispatchBase | 
PipelineCreateFlagBits2KHR::eDeferCompileNV | PipelineCreateFlagBits2KHR::eCaptureStatistics | PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations | PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure | - PipelineCreateFlagBits2KHR::eLinkTimeOptimization | PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo | PipelineCreateFlagBits2KHR::eLibrary | - PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs | + PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT | PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT | + PipelineCreateFlagBits2KHR::eLibrary | PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs | PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders | - PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay | PipelineCreateFlagBits2KHR::eIndirectBindable | - PipelineCreateFlagBits2KHR::eRayTracingAllowMotion | PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment | - PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment | PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap | - PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop | PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop | - PipelineCreateFlagBits2KHR::eNoProtectedAccess | PipelineCreateFlagBits2KHR::eProtectedAccessOnly | PipelineCreateFlagBits2KHR::eDescriptorBuffer; + PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay | PipelineCreateFlagBits2KHR::eIndirectBindableNV | + PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV | PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment | + PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT | + PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT | + PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT | PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT | + PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2KHR::eDescriptorBufferEXT; }; enum class BufferUsageFlagBits2KHR : VkBufferUsageFlags2KHR @@ -6712,13 +6708,13 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eConditionalRendering = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_KHR, - eShaderBindingTable = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR, - eRayTracing = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_KHR, - eTransformFeedbackBuffer = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_KHR, - eTransformFeedbackCounterBuffer = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_KHR, - eVideoDecodeSrc = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR, - eVideoDecodeDst = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR, + eConditionalRenderingEXT = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT, + eShaderBindingTable = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR, + eRayTracingNV = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV, + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + 
eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eVideoDecodeSrc = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDst = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR, #if defined( VK_ENABLE_BETA_EXTENSIONS ) eVideoEncodeDst = VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR, eVideoEncodeSrc = VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR, @@ -6726,11 +6722,11 @@ namespace VULKAN_HPP_NAMESPACE eShaderDeviceAddress = VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR, eAccelerationStructureBuildInputReadOnly = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, eAccelerationStructureStorage = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, - eSamplerDescriptorBuffer = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_KHR, - eResourceDescriptorBuffer = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_KHR, - ePushDescriptorsDescriptorBuffer = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_KHR, - eMicromapBuildInputReadOnly = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_KHR, - eMicromapStorage = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_KHR + eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT, + eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT, + ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT, + eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT, + eMicromapStorageEXT = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT }; using BufferUsageFlags2KHR = Flags; @@ -6746,15 +6742,16 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) | BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | BufferUsageFlagBits2KHR::eConditionalRendering | BufferUsageFlagBits2KHR::eShaderBindingTable | BufferUsageFlagBits2KHR::eTransformFeedbackBuffer | - BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer | BufferUsageFlagBits2KHR::eVideoDecodeSrc | BufferUsageFlagBits2KHR::eVideoDecodeDst + | BufferUsageFlagBits2KHR::eConditionalRenderingEXT | BufferUsageFlagBits2KHR::eShaderBindingTable | + BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT | BufferUsageFlagBits2KHR::eTransformFeedbackCounterBufferEXT | + BufferUsageFlagBits2KHR::eVideoDecodeSrc | BufferUsageFlagBits2KHR::eVideoDecodeDst #if defined( VK_ENABLE_BETA_EXTENSIONS ) | BufferUsageFlagBits2KHR::eVideoEncodeDst | BufferUsageFlagBits2KHR::eVideoEncodeSrc #endif /*VK_ENABLE_BETA_EXTENSIONS*/ | BufferUsageFlagBits2KHR::eShaderDeviceAddress | BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly | - BufferUsageFlagBits2KHR::eAccelerationStructureStorage | BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer | - BufferUsageFlagBits2KHR::eResourceDescriptorBuffer | BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer | - BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly | BufferUsageFlagBits2KHR::eMicromapStorage; + BufferUsageFlagBits2KHR::eAccelerationStructureStorage | BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT | + BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT | BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT | + BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT | BufferUsageFlagBits2KHR::eMicromapStorageEXT; }; //=== VK_EXT_shader_object === diff --git a/include/vulkan/vulkan_to_string.hpp b/include/vulkan/vulkan_to_string.hpp index d04d891..3eac3e8 100644 --- a/include/vulkan/vulkan_to_string.hpp +++ 
b/include/vulkan/vulkan_to_string.hpp @@ -3321,16 +3321,12 @@ namespace VULKAN_HPP_NAMESPACE result += "AllowDerivatives | "; if ( value & PipelineCreateFlagBits2KHR::eDerivative ) result += "Derivative | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - if ( value & PipelineCreateFlagBits2KHR::eReserved28NV ) - result += "Reserved28NV | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | "; if ( value & PipelineCreateFlagBits2KHR::eDispatchBase ) result += "DispatchBase | "; - if ( value & PipelineCreateFlagBits2KHR::eDeferCompile ) - result += "DeferCompile | "; + if ( value & PipelineCreateFlagBits2KHR::eDeferCompileNV ) + result += "DeferCompileNV | "; if ( value & PipelineCreateFlagBits2KHR::eCaptureStatistics ) result += "CaptureStatistics | "; if ( value & PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations ) @@ -3339,10 +3335,10 @@ namespace VULKAN_HPP_NAMESPACE result += "FailOnPipelineCompileRequired | "; if ( value & PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure ) result += "EarlyReturnOnFailure | "; - if ( value & PipelineCreateFlagBits2KHR::eLinkTimeOptimization ) - result += "LinkTimeOptimization | "; - if ( value & PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo ) - result += "RetainLinkTimeOptimizationInfo | "; + if ( value & PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT ) + result += "LinkTimeOptimizationEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT ) + result += "RetainLinkTimeOptimizationInfoEXT | "; if ( value & PipelineCreateFlagBits2KHR::eLibrary ) result += "Library | "; if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles ) @@ -3359,26 +3355,28 @@ namespace VULKAN_HPP_NAMESPACE result += "RayTracingNoNullIntersectionShaders | "; if ( value & PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay ) result += "RayTracingShaderGroupHandleCaptureReplay | "; - if ( value & PipelineCreateFlagBits2KHR::eIndirectBindable ) - result += "IndirectBindable | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingAllowMotion ) - result += "RayTracingAllowMotion | "; + if ( value & PipelineCreateFlagBits2KHR::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV ) + result += "RayTracingAllowMotionNV | "; if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment ) result += "RenderingFragmentShadingRateAttachment | "; - if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment ) - result += "RenderingFragmentDensityMapAttachment | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap ) - result += "RayTracingOpacityMicromap | "; - if ( value & PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop ) - result += "ColorAttachmentFeedbackLoop | "; - if ( value & PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop ) - result += "DepthStencilAttachmentFeedbackLoop | "; - if ( value & PipelineCreateFlagBits2KHR::eNoProtectedAccess ) - result += "NoProtectedAccess | "; - if ( value & PipelineCreateFlagBits2KHR::eProtectedAccessOnly ) - result += "ProtectedAccessOnly | "; - if ( value & PipelineCreateFlagBits2KHR::eDescriptorBuffer ) - result += "DescriptorBuffer | "; + if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT ) + result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & 
PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT ) + result += "RayTracingOpacityMicromapEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT ) + result += "ColorAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT ) + result += "DepthStencilAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT ) + result += "NoProtectedAccessEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT ) + result += "ProtectedAccessOnlyEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV ) + result += "RayTracingDisplacementMicromapNV | "; + if ( value & PipelineCreateFlagBits2KHR::eDescriptorBufferEXT ) + result += "DescriptorBufferEXT | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -3411,14 +3409,14 @@ namespace VULKAN_HPP_NAMESPACE if ( value & BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX ) result += "ExecutionGraphScratchAMDX | "; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - if ( value & BufferUsageFlagBits2KHR::eConditionalRendering ) - result += "ConditionalRendering | "; + if ( value & BufferUsageFlagBits2KHR::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; if ( value & BufferUsageFlagBits2KHR::eShaderBindingTable ) result += "ShaderBindingTable | "; - if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackBuffer ) - result += "TransformFeedbackBuffer | "; - if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer ) - result += "TransformFeedbackCounterBuffer | "; + if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT ) + result += "TransformFeedbackBufferEXT | "; + if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackCounterBufferEXT ) + result += "TransformFeedbackCounterBufferEXT | "; if ( value & BufferUsageFlagBits2KHR::eVideoDecodeSrc ) result += "VideoDecodeSrc | "; if ( value & BufferUsageFlagBits2KHR::eVideoDecodeDst ) @@ -3435,16 +3433,16 @@ namespace VULKAN_HPP_NAMESPACE result += "AccelerationStructureBuildInputReadOnly | "; if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureStorage ) result += "AccelerationStructureStorage | "; - if ( value & BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer ) - result += "SamplerDescriptorBuffer | "; - if ( value & BufferUsageFlagBits2KHR::eResourceDescriptorBuffer ) - result += "ResourceDescriptorBuffer | "; - if ( value & BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer ) - result += "PushDescriptorsDescriptorBuffer | "; - if ( value & BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly ) - result += "MicromapBuildInputReadOnly | "; - if ( value & BufferUsageFlagBits2KHR::eMicromapStorage ) - result += "MicromapStorage | "; + if ( value & BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT ) + result += "SamplerDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT ) + result += "ResourceDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT ) + result += "PushDescriptorsDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT ) + result += "MicromapBuildInputReadOnlyEXT | "; + if ( value & BufferUsageFlagBits2KHR::eMicromapStorageEXT ) + result += "MicromapStorageEXT | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -8692,18 +8690,15 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits2KHR::eDisableOptimization: return 
"DisableOptimization"; case PipelineCreateFlagBits2KHR::eAllowDerivatives: return "AllowDerivatives"; case PipelineCreateFlagBits2KHR::eDerivative: return "Derivative"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - case PipelineCreateFlagBits2KHR::eReserved28NV: return "Reserved28NV"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; case PipelineCreateFlagBits2KHR::eDispatchBase: return "DispatchBase"; - case PipelineCreateFlagBits2KHR::eDeferCompile: return "DeferCompile"; + case PipelineCreateFlagBits2KHR::eDeferCompileNV: return "DeferCompileNV"; case PipelineCreateFlagBits2KHR::eCaptureStatistics: return "CaptureStatistics"; case PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations: return "CaptureInternalRepresentations"; case PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; case PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; - case PipelineCreateFlagBits2KHR::eLinkTimeOptimization: return "LinkTimeOptimization"; - case PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfo: return "RetainLinkTimeOptimizationInfo"; + case PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT"; + case PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT"; case PipelineCreateFlagBits2KHR::eLibrary: return "Library"; case PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles: return "RayTracingSkipTriangles"; case PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs: return "RayTracingSkipAabbs"; @@ -8712,16 +8707,17 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders: return "RayTracingNoNullMissShaders"; case PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders: return "RayTracingNoNullIntersectionShaders"; case PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay: return "RayTracingShaderGroupHandleCaptureReplay"; - case PipelineCreateFlagBits2KHR::eIndirectBindable: return "IndirectBindable"; - case PipelineCreateFlagBits2KHR::eRayTracingAllowMotion: return "RayTracingAllowMotion"; + case PipelineCreateFlagBits2KHR::eIndirectBindableNV: return "IndirectBindableNV"; + case PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV"; case PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment: return "RenderingFragmentShadingRateAttachment"; - case PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachment: return "RenderingFragmentDensityMapAttachment"; - case PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromap: return "RayTracingOpacityMicromap"; - case PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoop: return "ColorAttachmentFeedbackLoop"; - case PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoop: return "DepthStencilAttachmentFeedbackLoop"; - case PipelineCreateFlagBits2KHR::eNoProtectedAccess: return "NoProtectedAccess"; - case PipelineCreateFlagBits2KHR::eProtectedAccessOnly: return "ProtectedAccessOnly"; - case PipelineCreateFlagBits2KHR::eDescriptorBuffer: return "DescriptorBuffer"; + case PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT"; + case PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT: return 
"ColorAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT: return "NoProtectedAccessEXT"; + case PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT: return "ProtectedAccessOnlyEXT"; + case PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; + case PipelineCreateFlagBits2KHR::eDescriptorBufferEXT: return "DescriptorBufferEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8742,10 +8738,10 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) case BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - case BufferUsageFlagBits2KHR::eConditionalRendering: return "ConditionalRendering"; + case BufferUsageFlagBits2KHR::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; case BufferUsageFlagBits2KHR::eShaderBindingTable: return "ShaderBindingTable"; - case BufferUsageFlagBits2KHR::eTransformFeedbackBuffer: return "TransformFeedbackBuffer"; - case BufferUsageFlagBits2KHR::eTransformFeedbackCounterBuffer: return "TransformFeedbackCounterBuffer"; + case BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits2KHR::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; case BufferUsageFlagBits2KHR::eVideoDecodeSrc: return "VideoDecodeSrc"; case BufferUsageFlagBits2KHR::eVideoDecodeDst: return "VideoDecodeDst"; #if defined( VK_ENABLE_BETA_EXTENSIONS ) @@ -8755,11 +8751,11 @@ namespace VULKAN_HPP_NAMESPACE case BufferUsageFlagBits2KHR::eShaderDeviceAddress: return "ShaderDeviceAddress"; case BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly: return "AccelerationStructureBuildInputReadOnly"; case BufferUsageFlagBits2KHR::eAccelerationStructureStorage: return "AccelerationStructureStorage"; - case BufferUsageFlagBits2KHR::eSamplerDescriptorBuffer: return "SamplerDescriptorBuffer"; - case BufferUsageFlagBits2KHR::eResourceDescriptorBuffer: return "ResourceDescriptorBuffer"; - case BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBuffer: return "PushDescriptorsDescriptorBuffer"; - case BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnly: return "MicromapBuildInputReadOnly"; - case BufferUsageFlagBits2KHR::eMicromapStorage: return "MicromapStorage"; + case BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT: return "SamplerDescriptorBufferEXT"; + case BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT: return "ResourceDescriptorBufferEXT"; + case BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT"; + case BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT"; + case BufferUsageFlagBits2KHR::eMicromapStorageEXT: return "MicromapStorageEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } diff --git a/registry/validusage.json b/registry/validusage.json index d44738f..cbea59d 100644 --- a/registry/validusage.json +++ b/registry/validusage.json @@ -1,9 +1,9 @@ { "version info": { "schema version": 2, - "api version": "1.3.260", - "comment": "from git branch: github-main commit: 12ab5855b1608e4b05b270e0dedecd1b1a5458f8", - "date": "2023-07-28 10:25:36Z" + "api version": "1.3.261", + "comment": 
"from git branch: github-main commit: b9aad705f0d9e5e6734ac2ad671d5d1de57b05e0", + "date": "2023-08-04 10:29:09Z" }, "validation": { "vkGetInstanceProcAddr": { @@ -12032,6 +12032,10 @@ "vuid": "VUID-VkGraphicsPipelineCreateInfo-pVertexInputState-08930", "text": " If the pipeline is being created with vertex input state and pVertexInputState is not dynamic, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription::format must have a 64-bit component" }, + { + "vuid": "VUID-VkGraphicsPipelineCreateInfo-pVertexInputState-09198", + "text": " If the pipeline is being created with vertex input state and pVertexInputState is not dynamic, and VkVertexInputAttributeDescription::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" + }, { "vuid": "VUID-VkGraphicsPipelineCreateInfo-dynamicPrimitiveTopologyUnrestricted-09031", "text": " If the pipeline requires vertex input state, and the VK_EXT_extended_dynamic_state3 extension is not enabled, or either VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, or VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY dynamic states are not set, or dynamicPrimitiveTopologyUnrestricted is VK_FALSE, pInputAssemblyState must be a valid pointer to a valid VkPipelineInputAssemblyStateCreateInfo structure" @@ -19744,13 +19748,21 @@ "vuid": "VUID-vkBindBufferMemory-bufferDeviceAddress-03339", "text": " If the VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress feature is enabled and buffer was created with the VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" }, + { + "vuid": "VUID-vkBindBufferMemory-bufferDeviceAddressCaptureReplay-09200", + "text": " If the VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddressCaptureReplay feature is enabled and buffer was created with the VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT bit set" + }, { "vuid": "VUID-vkBindBufferMemory-buffer-06408", "text": " If buffer was created with VkBufferCollectionBufferCreateInfoFUCHSIA chained to VkBufferCreateInfo::pNext, memory must be allocated with a VkImportMemoryBufferCollectionFUCHSIA chained to VkMemoryAllocateInfo::pNext" }, { "vuid": "VUID-vkBindBufferMemory-descriptorBufferCaptureReplay-08112", - "text": " If the VkPhysicalDeviceDescriptorBufferFeaturesEXT ::descriptorBufferCaptureReplay feature is enabled and buffer was created with the VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" + "text": " If the buffer was created with the VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" + }, + { + "vuid": "VUID-vkBindBufferMemory-buffer-09201", + "text": " If the buffer was created with the VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT bit set" }, { "vuid": "VUID-vkBindBufferMemory-device-parameter", @@ -19856,13 +19868,21 @@ "vuid": "VUID-VkBindBufferMemoryInfo-bufferDeviceAddress-03339", "text": " If the 
VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress feature is enabled and buffer was created with the VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-bufferDeviceAddressCaptureReplay-09200", + "text": " If the VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddressCaptureReplay feature is enabled and buffer was created with the VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT bit set" + }, { "vuid": "VUID-VkBindBufferMemoryInfo-buffer-06408", "text": " If buffer was created with VkBufferCollectionBufferCreateInfoFUCHSIA chained to VkBufferCreateInfo::pNext, memory must be allocated with a VkImportMemoryBufferCollectionFUCHSIA chained to VkMemoryAllocateInfo::pNext" }, { "vuid": "VUID-VkBindBufferMemoryInfo-descriptorBufferCaptureReplay-08112", - "text": " If the VkPhysicalDeviceDescriptorBufferFeaturesEXT ::descriptorBufferCaptureReplay feature is enabled and buffer was created with the VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" + "text": " If the buffer was created with the VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" + }, + { + "vuid": "VUID-VkBindBufferMemoryInfo-buffer-09201", + "text": " If the buffer was created with the VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT bit set" }, { "vuid": "VUID-VkBindBufferMemoryInfo-pNext-01605", @@ -19970,7 +19990,11 @@ }, { "vuid": "VUID-vkBindImageMemory-descriptorBufferCaptureReplay-08113", - "text": " If the VkPhysicalDeviceDescriptorBufferFeaturesEXT ::descriptorBufferCaptureReplay feature is enabled and image was created with the VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" + "text": " If the image was created with the VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" + }, + { + "vuid": "VUID-vkBindImageMemory-image-09202", + "text": " If the image was created with the VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT bit set" }, { "vuid": "VUID-vkBindImageMemory-image-01608", @@ -20094,7 +20118,11 @@ }, { "vuid": "VUID-VkBindImageMemoryInfo-descriptorBufferCaptureReplay-08113", - "text": " If the VkPhysicalDeviceDescriptorBufferFeaturesEXT ::descriptorBufferCaptureReplay feature is enabled and image was created with the VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" + "text": " If the image was created with the VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT bit set" + }, + { + "vuid": "VUID-VkBindImageMemoryInfo-image-09202", + "text": " If the image was created with the VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT bit 
set, memory must have been allocated with the VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT bit set" }, { "vuid": "VUID-VkBindImageMemoryInfo-pNext-01615", @@ -26430,7 +26458,7 @@ }, { "vuid": "VUID-vkGetQueryPoolResults-queryType-03230", - "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, flags must not contain VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, VK_QUERY_RESULT_PARTIAL_BIT or VK_QUERY_RESULT_64_BIT" + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, flags must not contain VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, VK_QUERY_RESULT_WITH_STATUS_BIT_KHR, VK_QUERY_RESULT_PARTIAL_BIT, or VK_QUERY_RESULT_64_BIT" }, { "vuid": "VUID-vkGetQueryPoolResults-queryType-03231", @@ -26514,7 +26542,7 @@ }, { "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-03233", - "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, flags must not contain VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, VK_QUERY_RESULT_PARTIAL_BIT or VK_QUERY_RESULT_64_BIT" + "text": " If the queryType used to create queryPool was VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR, flags must not contain VK_QUERY_RESULT_WITH_AVAILABILITY_BIT, VK_QUERY_RESULT_WITH_STATUS_BIT_KHR, VK_QUERY_RESULT_PARTIAL_BIT, or VK_QUERY_RESULT_64_BIT" }, { "vuid": "VUID-vkCmdCopyQueryPoolResults-queryType-03234", @@ -27114,7 +27142,7 @@ }, { "vuid": "VUID-vkCmdClearColorImage-imageLayout-01394", - "text": " imageLayout must be VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, or VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR" + "text": " imageLayout must be VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL or VK_IMAGE_LAYOUT_GENERAL" }, { "vuid": "VUID-vkCmdClearColorImage-aspectMask-02498", @@ -32490,11 +32518,15 @@ }, { "vuid": "VUID-vkCmdDraw-format-08936", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdDraw-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdDraw-None-09203", + "text": " If there is a shader object bound to a graphics stage or the 
currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdDraw-None-08882", @@ -33806,11 +33838,15 @@ }, { "vuid": "VUID-vkCmdDrawIndexed-format-08936", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdDrawIndexed-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdDrawIndexed-None-09203", + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdDrawIndexed-None-08882", @@ -35134,11 +35170,15 @@ }, { "vuid": "VUID-vkCmdDrawMultiEXT-format-08936", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdDrawMultiEXT-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT 
dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdDrawMultiEXT-None-09203", + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdDrawMultiEXT-None-08882", @@ -36466,11 +36506,15 @@ }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-format-08936", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-09203", + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdDrawMultiIndexedEXT-None-08882", @@ -37802,11 +37846,15 @@ }, { "vuid": "VUID-vkCmdDrawIndirect-format-08936", - "text": " If the bound graphics pipeline state was created with the 
VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdDrawIndirect-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdDrawIndirect-None-09203", + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdDrawIndirect-None-08882", @@ -39162,11 +39210,15 @@ }, { "vuid": "VUID-vkCmdDrawIndirectCount-format-08936", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdDrawIndirectCount-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex 
{ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdDrawIndirectCount-None-09203", + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdDrawIndirectCount-None-08882", @@ -40534,11 +40586,15 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-format-08936", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirect-None-09203", + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdDrawIndexedIndirect-None-08882", @@ -41906,11 +41962,15 @@ }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-format-08936", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format 
has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-09203", + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdDrawIndexedIndirectCount-None-08882", @@ -43286,11 +43346,15 @@ }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-format-08936", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-09203", + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, 
then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdDrawIndirectByteCountEXT-None-08882", @@ -59250,6 +59314,30 @@ } ] }, + "vkGetPipelineIndirectMemoryRequirementsNV": { + "core": [ + { + "vuid": "VUID-vkGetPipelineIndirectMemoryRequirementsNV-deviceGeneratedComputePipelines-09082", + "text": " The VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV::deviceGeneratedComputePipelines feature must be enabled" + }, + { + "vuid": "VUID-vkGetPipelineIndirectMemoryRequirementsNV-pCreateInfo-09083", + "text": " pCreateInfo::flags must include VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV" + }, + { + "vuid": "VUID-vkGetPipelineIndirectMemoryRequirementsNV-device-parameter", + "text": " device must be a valid VkDevice handle" + }, + { + "vuid": "VUID-vkGetPipelineIndirectMemoryRequirementsNV-pCreateInfo-parameter", + "text": " pCreateInfo must be a valid pointer to a valid VkComputePipelineCreateInfo structure" + }, + { + "vuid": "VUID-vkGetPipelineIndirectMemoryRequirementsNV-pMemoryRequirements-parameter", + "text": " pMemoryRequirements must be a valid pointer to a VkMemoryRequirements2 structure" + } + ] + }, "vkCmdExecuteGeneratedCommandsNV": { "core": [ { @@ -60498,11 +60586,15 @@ }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-format-08936", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then the scalar width associated with all Input variables of the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must be 64-bit" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-format-08937", - "text": " If the bound graphics pipeline state was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and the scalar width associated with a Location decorated Input variable in the Vertex {ExecutionModel} OpEntryPoint is 64-bit, then the corresponding VkVertexInputAttributeDescription2EXT::format must have a 64-bit component" + }, + { + "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-09203", + "text": " If there is a shader object bound to a graphics stage or the currently bound graphics pipeline was created with the VK_DYNAMIC_STATE_VERTEX_INPUT_EXT dynamic state enabled, and VkVertexInputAttributeDescription2EXT::format has a 64-bit component, then all Input variables at the corresponding Location in the Vertex {ExecutionModel} OpEntryPoint must not use components that are not present in the format" }, { "vuid": "VUID-vkCmdExecuteGeneratedCommandsNV-None-08882", @@ -63756,6 
+63848,10 @@ "vuid": "VUID-VkPresentInfoKHR-pNext-06235", "text": " If a VkPresentIdKHR structure is included in the pNext chain, and the presentId feature is not enabled, each presentIds entry in that structure must be NULL" }, + { + "vuid": "VUID-VkPresentInfoKHR-pSwapchains-09199", + "text": " If any element of the pSwapchains array has been created with VkSwapchainPresentModesCreateInfoEXT, all of the elements of this array must be created with VkSwapchainPresentModesCreateInfoEXT" + }, { "vuid": "VUID-VkPresentInfoKHR-sType-sType", "text": " sType must be VK_STRUCTURE_TYPE_PRESENT_INFO_KHR" diff --git a/registry/vk.xml b/registry/vk.xml index 898bd9f..28669af 100644 --- a/registry/vk.xml +++ b/registry/vk.xml @@ -175,7 +175,7 @@ branch of the member gitlab server. #define VKSC_API_VERSION_1_0 VK_MAKE_API_VERSION(VKSC_API_VARIANT, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 260 +#define VK_HEADER_VERSION 261 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) // Version of this file @@ -21658,9 +21658,6 @@ typedef void* MTLSharedEvent_id; - - - @@ -22395,7 +22392,7 @@ typedef void* MTLSharedEvent_id; - + @@ -22433,7 +22430,7 @@ typedef void* MTLSharedEvent_id; - + @@ -22444,8 +22441,8 @@ typedef void* MTLSharedEvent_id; - - + + @@ -22460,43 +22457,46 @@ typedef void* MTLSharedEvent_id; - + - + - + - + - - + + - - + + + + + - + - + - + - - + + @@ -22514,13 +22514,13 @@ typedef void* MTLSharedEvent_id; - - - + + + - - + + @@ -23179,6 +23179,12 @@ typedef void* MTLSharedEvent_id; + + + + + + @@ -24792,6 +24798,9 @@ typedef void* MTLSharedEvent_id; + + +
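Note on the renames above, for code that consumes these headers: every 64-bit VkPipelineCreateFlagBits2KHR / VkBufferUsageFlagBits2KHR constant now carries the suffix of the extension that introduced it (EXT or NV) rather than a uniform KHR suffix, and the vulkan.hpp enumerators and to_string helpers follow the same spelling. A minimal illustrative C++ sketch, not part of this patch, assuming the 1.3.261 headers or newer:

// Illustrative sketch only -- not part of the patch.
#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_to_string.hpp>
#include <iostream>

static_assert( VK_HEADER_VERSION >= 261, "the EXT/NV-suffixed spellings below need headers >= 1.3.261" );

int main()
{
  // 64-bit pipeline-create flags: formerly spelled eDescriptorBuffer / eRayTracingAllowMotion (KHR-style).
  vk::PipelineCreateFlags2KHR pipelineFlags = vk::PipelineCreateFlagBits2KHR::eDescriptorBufferEXT |
                                              vk::PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV;

  // The to_string overloads were renamed in lock-step, e.g. "{ RayTracingAllowMotionNV | DescriptorBufferEXT }".
  std::cout << vk::to_string( pipelineFlags ) << '\n';

  // Same pattern for the 64-bit buffer-usage flags.
  vk::BufferUsageFlags2KHR bufferUsage = vk::BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT |
                                         vk::BufferUsageFlagBits2KHR::eShaderDeviceAddress;
  std::cout << vk::to_string( bufferUsage ) << '\n';
  return 0;
}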
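The new bind-memory requirements above (VUIDs 09200/09201/09202) tighten the capture/replay path: memory bound to a buffer created with VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, or to a buffer/image created with VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT / VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT, must now be allocated with VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT in addition to VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT. A hedged sketch of the allocation side; the helper name is hypothetical and only illustrates the flag combination:

// Illustrative sketch only -- not part of the patch.
#include <vulkan/vulkan.hpp>

// Fills a VkMemoryAllocateInfo whose pNext chain carries the allocate flags required when the
// resource being bound was created with one of the ..._CAPTURE_REPLAY_BIT* create flags.
vk::MemoryAllocateInfo makeCaptureReplayAllocateInfo( vk::DeviceSize                allocationSize,
                                                      uint32_t                      memoryTypeIndex,
                                                      vk::MemoryAllocateFlagsInfo & flagsInfo )  // must outlive the returned struct
{
  flagsInfo.flags = vk::MemoryAllocateFlagBits::eDeviceAddress |              // already required by 03339 / 08112
                    vk::MemoryAllocateFlagBits::eDeviceAddressCaptureReplay;  // newly required by 09200 / 09201 / 09202
  vk::MemoryAllocateInfo allocateInfo( allocationSize, memoryTypeIndex );
  allocateInfo.pNext = &flagsInfo;
  return allocateInfo;
}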
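The added 09198/09203 entries complete the 64-bit vertex-attribute matching rules: when the attribute format has 64-bit components, the vertex-shader input at that Location must not read components the format does not provide. As a hedged illustration (names hypothetical), a GLSL "layout(location = 0) in dvec2" input should be paired with a two-component 64-bit format such as VK_FORMAT_R64G64_SFLOAT, never with a wider shader declaration than the format supplies:

// Illustrative sketch only -- not part of the patch. Matches a dvec2 shader input at location 0;
// declaring the input as dvec3 or dvec4 against this format would now violate VUID ...-09198 / ...-09203.
#include <vulkan/vulkan.hpp>

const vk::VertexInputAttributeDescription kDvec2Attribute( /*location*/ 0,
                                                           /*binding*/ 0,
                                                           vk::Format::eR64G64Sfloat,
                                                           /*offset*/ 0 );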