Update for Vulkan-Docs 1.4.317

This commit is contained in:
Jon Leech 2025-06-06 13:47:45 +01:00 committed by Jon Leech
parent b11eecd68f
commit 2642d51e1e
23 changed files with 12386 additions and 834 deletions

View file

@ -0,0 +1,151 @@
#ifndef VULKAN_VIDEO_CODEC_VP9STD_H_
#define VULKAN_VIDEO_CODEC_VP9STD_H_ 1
/*
** Copyright 2015-2025 The Khronos Group Inc.
**
** SPDX-License-Identifier: Apache-2.0
*/
/*
** This header is generated from the Khronos Vulkan XML API Registry.
**
*/
#ifdef __cplusplus
extern "C" {
#endif
// vulkan_video_codec_vp9std is a preprocessor guard. Do not pass it to API calls.
#define vulkan_video_codec_vp9std 1
#include "vulkan_video_codecs_common.h"
// Codec limits, generated from the Vulkan XML registry; the names mirror the
// corresponding constants in the VP9 bitstream specification.
#define STD_VIDEO_VP9_NUM_REF_FRAMES 8 // size of the decoded reference frame pool (NUM_REF_FRAMES)
#define STD_VIDEO_VP9_REFS_PER_FRAME 3 // active references usable by an inter frame (REFS_PER_FRAME)
#define STD_VIDEO_VP9_MAX_REF_FRAMES 4 // intra + LAST/GOLDEN/ALTREF (MAX_REF_FRAMES)
#define STD_VIDEO_VP9_LOOP_FILTER_ADJUSTMENTS 2 // number of loop-filter mode deltas (MAX_MODE_LF_DELTAS)
#define STD_VIDEO_VP9_MAX_SEGMENTS 8 // number of segments (MAX_SEGMENTS)
#define STD_VIDEO_VP9_SEG_LVL_MAX 4 // number of segment features (SEG_LVL_MAX)
#define STD_VIDEO_VP9_MAX_SEGMENTATION_TREE_PROBS 7 // probs in the segment-id decoding tree
#define STD_VIDEO_VP9_MAX_SEGMENTATION_PRED_PROB 3 // probs for temporal segment-id prediction
// VP9 coded profile (profiles 0-3 as defined by the VP9 specification).
// _INVALID / _MAX_ENUM are sentinels, never valid bitstream values.
typedef enum StdVideoVP9Profile {
STD_VIDEO_VP9_PROFILE_0 = 0,
STD_VIDEO_VP9_PROFILE_1 = 1,
STD_VIDEO_VP9_PROFILE_2 = 2,
STD_VIDEO_VP9_PROFILE_3 = 3,
STD_VIDEO_VP9_PROFILE_INVALID = 0x7FFFFFFF,
STD_VIDEO_VP9_PROFILE_MAX_ENUM = 0x7FFFFFFF
} StdVideoVP9Profile;
// VP9 operating levels 1.0 through 6.2 (see the "Levels" definitions in the
// VP9 specification). _INVALID / _MAX_ENUM are sentinels only.
typedef enum StdVideoVP9Level {
STD_VIDEO_VP9_LEVEL_1_0 = 0,
STD_VIDEO_VP9_LEVEL_1_1 = 1,
STD_VIDEO_VP9_LEVEL_2_0 = 2,
STD_VIDEO_VP9_LEVEL_2_1 = 3,
STD_VIDEO_VP9_LEVEL_3_0 = 4,
STD_VIDEO_VP9_LEVEL_3_1 = 5,
STD_VIDEO_VP9_LEVEL_4_0 = 6,
STD_VIDEO_VP9_LEVEL_4_1 = 7,
STD_VIDEO_VP9_LEVEL_5_0 = 8,
STD_VIDEO_VP9_LEVEL_5_1 = 9,
STD_VIDEO_VP9_LEVEL_5_2 = 10,
STD_VIDEO_VP9_LEVEL_6_0 = 11,
STD_VIDEO_VP9_LEVEL_6_1 = 12,
STD_VIDEO_VP9_LEVEL_6_2 = 13,
STD_VIDEO_VP9_LEVEL_INVALID = 0x7FFFFFFF,
STD_VIDEO_VP9_LEVEL_MAX_ENUM = 0x7FFFFFFF
} StdVideoVP9Level;
// VP9 frame_type syntax element: KEY (intra-coded key frame) vs NON_KEY
// (all other frames, including inter and intra-only frames).
typedef enum StdVideoVP9FrameType {
STD_VIDEO_VP9_FRAME_TYPE_KEY = 0,
STD_VIDEO_VP9_FRAME_TYPE_NON_KEY = 1,
STD_VIDEO_VP9_FRAME_TYPE_INVALID = 0x7FFFFFFF,
STD_VIDEO_VP9_FRAME_TYPE_MAX_ENUM = 0x7FFFFFFF
} StdVideoVP9FrameType;
// Named reference slots used by VP9 prediction: intra (no reference) plus the
// three inter references LAST / GOLDEN / ALTREF. Matches the reference-name
// indexing of the VP9 specification (hence STD_VIDEO_VP9_MAX_REF_FRAMES = 4).
typedef enum StdVideoVP9ReferenceName {
STD_VIDEO_VP9_REFERENCE_NAME_INTRA_FRAME = 0,
STD_VIDEO_VP9_REFERENCE_NAME_LAST_FRAME = 1,
STD_VIDEO_VP9_REFERENCE_NAME_GOLDEN_FRAME = 2,
STD_VIDEO_VP9_REFERENCE_NAME_ALTREF_FRAME = 3,
STD_VIDEO_VP9_REFERENCE_NAME_INVALID = 0x7FFFFFFF,
STD_VIDEO_VP9_REFERENCE_NAME_MAX_ENUM = 0x7FFFFFFF
} StdVideoVP9ReferenceName;
// VP9 interp_filter values for sub-pixel motion compensation. SWITCHABLE
// indicates the filter choice is signalled at a finer granularity in the
// bitstream rather than fixed for the whole frame.
typedef enum StdVideoVP9InterpolationFilter {
STD_VIDEO_VP9_INTERPOLATION_FILTER_EIGHTTAP = 0,
STD_VIDEO_VP9_INTERPOLATION_FILTER_EIGHTTAP_SMOOTH = 1,
STD_VIDEO_VP9_INTERPOLATION_FILTER_EIGHTTAP_SHARP = 2,
STD_VIDEO_VP9_INTERPOLATION_FILTER_BILINEAR = 3,
STD_VIDEO_VP9_INTERPOLATION_FILTER_SWITCHABLE = 4,
STD_VIDEO_VP9_INTERPOLATION_FILTER_INVALID = 0x7FFFFFFF,
STD_VIDEO_VP9_INTERPOLATION_FILTER_MAX_ENUM = 0x7FFFFFFF
} StdVideoVP9InterpolationFilter;
// VP9 color_space syntax element values from the uncompressed frame header.
typedef enum StdVideoVP9ColorSpace {
STD_VIDEO_VP9_COLOR_SPACE_UNKNOWN = 0,
STD_VIDEO_VP9_COLOR_SPACE_BT_601 = 1,
STD_VIDEO_VP9_COLOR_SPACE_BT_709 = 2,
STD_VIDEO_VP9_COLOR_SPACE_SMPTE_170 = 3,
STD_VIDEO_VP9_COLOR_SPACE_SMPTE_240 = 4,
STD_VIDEO_VP9_COLOR_SPACE_BT_2020 = 5,
STD_VIDEO_VP9_COLOR_SPACE_RESERVED = 6, // reserved in the bitstream; not a usable value
STD_VIDEO_VP9_COLOR_SPACE_RGB = 7,
STD_VIDEO_VP9_COLOR_SPACE_INVALID = 0x7FFFFFFF,
STD_VIDEO_VP9_COLOR_SPACE_MAX_ENUM = 0x7FFFFFFF
} StdVideoVP9ColorSpace;
// Bit-field flags for StdVideoVP9ColorConfig.
typedef struct StdVideoVP9ColorConfigFlags {
uint32_t color_range : 1; // VP9 color_range: 0 = studio (limited) swing, 1 = full swing
uint32_t reserved : 31; // reserved for future use; must be zero
} StdVideoVP9ColorConfigFlags;
// Color and bit-depth configuration from the VP9 uncompressed frame header
// (color_config()).
typedef struct StdVideoVP9ColorConfig {
StdVideoVP9ColorConfigFlags flags;
uint8_t BitDepth; // bit depth of the stream; presumably 8, 10 or 12 per the profile — verify against the Vulkan Video spec
uint8_t subsampling_x; // horizontal chroma subsampling flag
uint8_t subsampling_y; // vertical chroma subsampling flag
uint8_t reserved1; // padding; must be zero
StdVideoVP9ColorSpace color_space;
} StdVideoVP9ColorConfig;
// Bit-field flags for StdVideoVP9LoopFilter; names match the VP9
// loop_filter_params() syntax elements.
typedef struct StdVideoVP9LoopFilterFlags {
uint32_t loop_filter_delta_enabled : 1; // reference/mode loop-filter deltas in use
uint32_t loop_filter_delta_update : 1; // deltas updated by this frame header
uint32_t reserved : 30; // reserved for future use; must be zero
} StdVideoVP9LoopFilterFlags;
// Loop-filter parameters from the VP9 frame header (loop_filter_params()).
typedef struct StdVideoVP9LoopFilter {
StdVideoVP9LoopFilterFlags flags;
uint8_t loop_filter_level; // filter strength
uint8_t loop_filter_sharpness; // filter sharpness
uint8_t update_ref_delta; // NOTE(review): presumably a per-entry bitmask telling which loop_filter_ref_deltas were coded — confirm against the Vulkan Video spec
int8_t loop_filter_ref_deltas[STD_VIDEO_VP9_MAX_REF_FRAMES]; // per-reference filter-level deltas, indexed by StdVideoVP9ReferenceName
uint8_t update_mode_delta; // NOTE(review): presumably the analogous bitmask for loop_filter_mode_deltas — confirm
int8_t loop_filter_mode_deltas[STD_VIDEO_VP9_LOOP_FILTER_ADJUSTMENTS]; // per-mode filter-level deltas
} StdVideoVP9LoopFilter;
// Bit-field flags for StdVideoVP9Segmentation; names match the VP9
// segmentation_params() syntax elements.
typedef struct StdVideoVP9SegmentationFlags {
uint32_t segmentation_update_map : 1; // segment map is updated by this frame
uint32_t segmentation_temporal_update : 1; // segment map predicted from the previous frame
uint32_t segmentation_update_data : 1; // per-segment feature data updated
uint32_t segmentation_abs_or_delta_update : 1; // feature values absolute (1) vs delta (0)
uint32_t reserved : 28; // reserved for future use; must be zero
} StdVideoVP9SegmentationFlags;
// Segmentation parameters from the VP9 frame header (segmentation_params()).
typedef struct StdVideoVP9Segmentation {
StdVideoVP9SegmentationFlags flags;
uint8_t segmentation_tree_probs[STD_VIDEO_VP9_MAX_SEGMENTATION_TREE_PROBS]; // probabilities for decoding the segment-id tree
uint8_t segmentation_pred_prob[STD_VIDEO_VP9_MAX_SEGMENTATION_PRED_PROB]; // probabilities for temporal segment-id prediction
uint8_t FeatureEnabled[STD_VIDEO_VP9_MAX_SEGMENTS]; // NOTE(review): per-segment enable — presumably one bit per SEG_LVL feature; confirm against the Vulkan Video spec
int16_t FeatureData[STD_VIDEO_VP9_MAX_SEGMENTS][STD_VIDEO_VP9_SEG_LVL_MAX]; // per-segment, per-feature data values
} StdVideoVP9Segmentation;
#ifdef __cplusplus
}
#endif
#endif

View file

@ -0,0 +1,68 @@
#ifndef VULKAN_VIDEO_CODEC_VP9STD_DECODE_H_
#define VULKAN_VIDEO_CODEC_VP9STD_DECODE_H_ 1
/*
** Copyright 2015-2025 The Khronos Group Inc.
**
** SPDX-License-Identifier: Apache-2.0
*/
/*
** This header is generated from the Khronos Vulkan XML API Registry.
**
*/
#ifdef __cplusplus
extern "C" {
#endif
// vulkan_video_codec_vp9std_decode is a preprocessor guard. Do not pass it to API calls.
#define vulkan_video_codec_vp9std_decode 1
#include "vulkan_video_codec_vp9std.h"
// Version and name macros identifying the VP9 decode Video Std extension.
#define VK_STD_VULKAN_VIDEO_CODEC_VP9_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0)
#define VK_STD_VULKAN_VIDEO_CODEC_VP9_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_VP9_DECODE_API_VERSION_1_0_0
#define VK_STD_VULKAN_VIDEO_CODEC_VP9_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_vp9_decode"
// Bit-field flags for StdVideoDecodeVP9PictureInfo; names match the VP9
// uncompressed frame header syntax elements (UsePrevFrameMvs is the derived
// variable of the same name in the VP9 decoding process).
typedef struct StdVideoDecodeVP9PictureInfoFlags {
uint32_t error_resilient_mode : 1;
uint32_t intra_only : 1;
uint32_t allow_high_precision_mv : 1;
uint32_t refresh_frame_context : 1;
uint32_t frame_parallel_decoding_mode : 1;
uint32_t segmentation_enabled : 1;
uint32_t show_frame : 1;
uint32_t UsePrevFrameMvs : 1;
uint32_t reserved : 24; // reserved for future use; must be zero
} StdVideoDecodeVP9PictureInfoFlags;
// Per-picture parameters for VP9 decode, mirroring the VP9 uncompressed
// frame header. Pointed-to sub-structures carry the color, loop-filter and
// segmentation state for this frame.
typedef struct StdVideoDecodeVP9PictureInfo {
StdVideoDecodeVP9PictureInfoFlags flags;
StdVideoVP9Profile profile;
StdVideoVP9FrameType frame_type;
uint8_t frame_context_idx; // index of the probability context used by this frame
uint8_t reset_frame_context; // frame-context reset behaviour signalled in the header
uint8_t refresh_frame_flags; // bitmask of reference-pool slots refreshed by this frame
uint8_t ref_frame_sign_bias_mask; // NOTE(review): presumably one sign-bias bit per StdVideoVP9ReferenceName — confirm against the Vulkan Video spec
StdVideoVP9InterpolationFilter interpolation_filter;
uint8_t base_q_idx; // base quantizer index
int8_t delta_q_y_dc; // quantizer delta for luma DC
int8_t delta_q_uv_dc; // quantizer delta for chroma DC
int8_t delta_q_uv_ac; // quantizer delta for chroma AC
uint8_t tile_cols_log2; // log2 of tile columns
uint8_t tile_rows_log2; // log2 of tile rows
uint16_t reserved1[3]; // padding; must be zero
const StdVideoVP9ColorConfig* pColorConfig;
const StdVideoVP9LoopFilter* pLoopFilter;
const StdVideoVP9Segmentation* pSegmentation;
} StdVideoDecodeVP9PictureInfo;
#ifdef __cplusplus
}
#endif
#endif

View file

@ -151,11 +151,11 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::EventCreateFlags;
using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlagBits;
using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags;
using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlagBits;
using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags;
using VULKAN_HPP_NAMESPACE::QueryResultFlagBits;
using VULKAN_HPP_NAMESPACE::QueryResultFlags;
using VULKAN_HPP_NAMESPACE::QueryType;
using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlagBits;
using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags;
using VULKAN_HPP_NAMESPACE::BufferCreateFlagBits;
using VULKAN_HPP_NAMESPACE::BufferCreateFlags;
using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits;
@ -859,6 +859,15 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagBitsLUNARG;
using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG;
//=== VK_ARM_tensors ===
using VULKAN_HPP_NAMESPACE::TensorCreateFlagBitsARM;
using VULKAN_HPP_NAMESPACE::TensorCreateFlagsARM;
using VULKAN_HPP_NAMESPACE::TensorViewCreateFlagBitsARM;
using VULKAN_HPP_NAMESPACE::TensorViewCreateFlagsARM;
using VULKAN_HPP_NAMESPACE::TensorUsageFlagBitsARM;
using VULKAN_HPP_NAMESPACE::TensorUsageFlagsARM;
using VULKAN_HPP_NAMESPACE::TensorTilingARM;
//=== VK_NV_optical_flow ===
using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV;
using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV;
@ -963,6 +972,9 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
//=== VK_EXT_depth_clamp_control ===
using VULKAN_HPP_NAMESPACE::DepthClampModeEXT;
//=== VK_KHR_maintenance9 ===
using VULKAN_HPP_NAMESPACE::DefaultVertexAttributeValueKHR;
//=========================
//=== Index Type Traits ===
//=========================
@ -2462,6 +2474,10 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::LUNARGDirectDriverLoadingExtensionName;
using VULKAN_HPP_NAMESPACE::LUNARGDirectDriverLoadingSpecVersion;
//=== VK_ARM_tensors ===
using VULKAN_HPP_NAMESPACE::ARMTensorsExtensionName;
using VULKAN_HPP_NAMESPACE::ARMTensorsSpecVersion;
//=== VK_EXT_shader_module_identifier ===
using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeEXT;
using VULKAN_HPP_NAMESPACE::EXTShaderModuleIdentifierExtensionName;
@ -2497,6 +2513,14 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::AMDAntiLagExtensionName;
using VULKAN_HPP_NAMESPACE::AMDAntiLagSpecVersion;
//=== VK_KHR_present_id2 ===
using VULKAN_HPP_NAMESPACE::KHRPresentId2ExtensionName;
using VULKAN_HPP_NAMESPACE::KHRPresentId2SpecVersion;
//=== VK_KHR_present_wait2 ===
using VULKAN_HPP_NAMESPACE::KHRPresentWait2ExtensionName;
using VULKAN_HPP_NAMESPACE::KHRPresentWait2SpecVersion;
//=== VK_KHR_ray_tracing_position_fetch ===
using VULKAN_HPP_NAMESPACE::KHRRayTracingPositionFetchExtensionName;
using VULKAN_HPP_NAMESPACE::KHRRayTracingPositionFetchSpecVersion;
@ -2583,6 +2607,11 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::KHRVideoEncodeAv1ExtensionName;
using VULKAN_HPP_NAMESPACE::KHRVideoEncodeAv1SpecVersion;
//=== VK_KHR_video_decode_vp9 ===
using VULKAN_HPP_NAMESPACE::MaxVideoVp9ReferencesPerFrameKHR;
using VULKAN_HPP_NAMESPACE::KHRVideoDecodeVp9ExtensionName;
using VULKAN_HPP_NAMESPACE::KHRVideoDecodeVp9SpecVersion;
//=== VK_KHR_video_maintenance1 ===
using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance1ExtensionName;
using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance1SpecVersion;
@ -2619,6 +2648,10 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::KHRLoadStoreOpNoneExtensionName;
using VULKAN_HPP_NAMESPACE::KHRLoadStoreOpNoneSpecVersion;
//=== VK_KHR_unified_image_layouts ===
using VULKAN_HPP_NAMESPACE::KHRUnifiedImageLayoutsExtensionName;
using VULKAN_HPP_NAMESPACE::KHRUnifiedImageLayoutsSpecVersion;
//=== VK_KHR_shader_float_controls2 ===
using VULKAN_HPP_NAMESPACE::KHRShaderFloatControls2ExtensionName;
using VULKAN_HPP_NAMESPACE::KHRShaderFloatControls2SpecVersion;
@ -2697,6 +2730,10 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::EXTShaderReplicatedCompositesExtensionName;
using VULKAN_HPP_NAMESPACE::EXTShaderReplicatedCompositesSpecVersion;
//=== VK_EXT_shader_float8 ===
using VULKAN_HPP_NAMESPACE::EXTShaderFloat8ExtensionName;
using VULKAN_HPP_NAMESPACE::EXTShaderFloat8SpecVersion;
//=== VK_NV_ray_tracing_validation ===
using VULKAN_HPP_NAMESPACE::NVRayTracingValidationExtensionName;
using VULKAN_HPP_NAMESPACE::NVRayTracingValidationSpecVersion;
@ -2726,6 +2763,10 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::EXTDepthClampControlExtensionName;
using VULKAN_HPP_NAMESPACE::EXTDepthClampControlSpecVersion;
//=== VK_KHR_maintenance9 ===
using VULKAN_HPP_NAMESPACE::KHRMaintenance9ExtensionName;
using VULKAN_HPP_NAMESPACE::KHRMaintenance9SpecVersion;
//=== VK_KHR_video_maintenance2 ===
using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance2ExtensionName;
using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance2SpecVersion;
@ -4515,6 +4556,32 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG;
using VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG;
//=== VK_ARM_tensors ===
using VULKAN_HPP_NAMESPACE::TensorDescriptionARM;
using VULKAN_HPP_NAMESPACE::TensorCreateInfoARM;
using VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM;
using VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM;
using VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM;
using VULKAN_HPP_NAMESPACE::WriteDescriptorSetTensorARM;
using VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM;
using VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM;
using VULKAN_HPP_NAMESPACE::TensorDependencyInfoARM;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorFeaturesARM;
using VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM;
using VULKAN_HPP_NAMESPACE::CopyTensorInfoARM;
using VULKAN_HPP_NAMESPACE::TensorCopyARM;
using VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoTensorARM;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM;
using VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM;
using VULKAN_HPP_NAMESPACE::ExternalMemoryTensorCreateInfoARM;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorFeaturesARM;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorPropertiesARM;
using VULKAN_HPP_NAMESPACE::DescriptorGetTensorInfoARM;
using VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM;
using VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM;
using VULKAN_HPP_NAMESPACE::FrameBoundaryTensorsARM;
//=== VK_EXT_shader_module_identifier ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
@ -4549,6 +4616,16 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::AntiLagDataAMD;
using VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD;
//=== VK_KHR_present_id2 ===
using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentId2KHR;
using VULKAN_HPP_NAMESPACE::PresentId2KHR;
using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentId2FeaturesKHR;
//=== VK_KHR_present_wait2 ===
using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentWait2KHR;
using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWait2FeaturesKHR;
using VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR;
//=== VK_KHR_ray_tracing_position_fetch ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
@ -4669,6 +4746,12 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR;
using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR;
//=== VK_KHR_video_decode_vp9 ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoDecodeVP9FeaturesKHR;
using VULKAN_HPP_NAMESPACE::VideoDecodeVP9ProfileInfoKHR;
using VULKAN_HPP_NAMESPACE::VideoDecodeVP9CapabilitiesKHR;
using VULKAN_HPP_NAMESPACE::VideoDecodeVP9PictureInfoKHR;
//=== VK_KHR_video_maintenance1 ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR;
using VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR;
@ -4696,6 +4779,10 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
//=== VK_KHR_unified_image_layouts ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
using VULKAN_HPP_NAMESPACE::AttachmentFeedbackLoopInfoEXT;
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
using VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX;
@ -4770,6 +4857,9 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
//=== VK_EXT_shader_replicated_composites ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT;
//=== VK_EXT_shader_float8 ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat8FeaturesEXT;
//=== VK_NV_ray_tracing_validation ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV;
@ -4842,6 +4932,11 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
using VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT;
using VULKAN_HPP_NAMESPACE::DepthClampRangeEXT;
//=== VK_KHR_maintenance9 ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR;
using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9PropertiesKHR;
using VULKAN_HPP_NAMESPACE::QueueFamilyOwnershipTransferPropertiesKHR;
//=== VK_KHR_video_maintenance2 ===
using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR;
using VULKAN_HPP_NAMESPACE::VideoDecodeH264InlineSessionParametersInfoKHR;
@ -4997,6 +5092,10 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
//=== VK_EXT_opacity_micromap ===
using VULKAN_HPP_NAMESPACE::MicromapEXT;
//=== VK_ARM_tensors ===
using VULKAN_HPP_NAMESPACE::TensorARM;
using VULKAN_HPP_NAMESPACE::TensorViewARM;
//=== VK_NV_optical_flow ===
using VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV;
@ -5106,6 +5205,10 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
//=== VK_EXT_opacity_micromap ===
using VULKAN_HPP_NAMESPACE::UniqueMicromapEXT;
//=== VK_ARM_tensors ===
using VULKAN_HPP_NAMESPACE::UniqueTensorARM;
using VULKAN_HPP_NAMESPACE::UniqueTensorViewARM;
//=== VK_NV_optical_flow ===
using VULKAN_HPP_NAMESPACE::UniqueOpticalFlowSessionNV;
@ -5221,6 +5324,10 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
//=== VK_EXT_opacity_micromap ===
using VULKAN_HPP_NAMESPACE::SharedMicromapEXT;
//=== VK_ARM_tensors ===
using VULKAN_HPP_NAMESPACE::SharedTensorARM;
using VULKAN_HPP_NAMESPACE::SharedTensorViewARM;
//=== VK_NV_optical_flow ===
using VULKAN_HPP_NAMESPACE::SharedOpticalFlowSessionNV;
@ -5424,6 +5531,10 @@ using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
//=== VK_EXT_opacity_micromap ===
using VULKAN_HPP_RAII_NAMESPACE::MicromapEXT;
//=== VK_ARM_tensors ===
using VULKAN_HPP_RAII_NAMESPACE::TensorARM;
using VULKAN_HPP_RAII_NAMESPACE::TensorViewARM;
//=== VK_NV_optical_flow ===
using VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV;
@ -5552,6 +5663,10 @@ export namespace std
//=== VK_EXT_opacity_micromap ===
template <> struct hash<VULKAN_HPP_NAMESPACE::MicromapEXT>;
//=== VK_ARM_tensors ===
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorViewARM>;
//=== VK_NV_optical_flow ===
template <> struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>;
@ -7026,6 +7141,32 @@ export namespace std
template <> struct hash<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG>;
template <> struct hash<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG>;
//=== VK_ARM_tensors ===
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorDescriptionARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorCreateInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetTensorARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorDependencyInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorFeaturesARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::CopyTensorInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorCopyARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoTensorARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryTensorCreateInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorFeaturesARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorPropertiesARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorGetTensorInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM>;
template <> struct hash<VULKAN_HPP_NAMESPACE::FrameBoundaryTensorsARM>;
//=== VK_EXT_shader_module_identifier ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT>;
@ -7059,6 +7200,16 @@ export namespace std
template <> struct hash<VULKAN_HPP_NAMESPACE::AntiLagDataAMD>;
template <> struct hash<VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD>;
//=== VK_KHR_present_id2 ===
template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentId2KHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PresentId2KHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentId2FeaturesKHR>;
//=== VK_KHR_present_wait2 ===
template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentWait2KHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWait2FeaturesKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR>;
//=== VK_KHR_ray_tracing_position_fetch ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR>;
@ -7181,6 +7332,12 @@ export namespace std
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR>;
//=== VK_KHR_video_decode_vp9 ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoDecodeVP9FeaturesKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeVP9ProfileInfoKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeVP9CapabilitiesKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeVP9PictureInfoKHR>;
//=== VK_KHR_video_maintenance1 ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR>;
@ -7208,6 +7365,10 @@ export namespace std
//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>;
//=== VK_KHR_unified_image_layouts ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceUnifiedImageLayoutsFeaturesKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentFeedbackLoopInfoEXT>;
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
template <> struct hash<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>;
@ -7281,6 +7442,9 @@ export namespace std
//=== VK_EXT_shader_replicated_composites ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT>;
//=== VK_EXT_shader_float8 ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat8FeaturesEXT>;
//=== VK_NV_ray_tracing_validation ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV>;
@ -7353,6 +7517,11 @@ export namespace std
template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT>;
template <> struct hash<VULKAN_HPP_NAMESPACE::DepthClampRangeEXT>;
//=== VK_KHR_maintenance9 ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9PropertiesKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyOwnershipTransferPropertiesKHR>;
//=== VK_KHR_video_maintenance2 ===
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR>;
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeH264InlineSessionParametersInfoKHR>;
@ -8433,6 +8602,19 @@ export using ::PFN_vkCmdSetShadingRateImageEnableNV;
export using ::PFN_vkCmdSetRepresentativeFragmentTestEnableNV;
export using ::PFN_vkCmdSetCoverageReductionModeNV;
//=== VK_ARM_tensors ===
export using ::PFN_vkCreateTensorARM;
export using ::PFN_vkDestroyTensorARM;
export using ::PFN_vkCreateTensorViewARM;
export using ::PFN_vkDestroyTensorViewARM;
export using ::PFN_vkGetTensorMemoryRequirementsARM;
export using ::PFN_vkBindTensorMemoryARM;
export using ::PFN_vkGetDeviceTensorMemoryRequirementsARM;
export using ::PFN_vkCmdCopyTensorARM;
export using ::PFN_vkGetPhysicalDeviceExternalTensorPropertiesARM;
export using ::PFN_vkGetTensorOpaqueCaptureDescriptorDataARM;
export using ::PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM;
//=== VK_EXT_shader_module_identifier ===
export using ::PFN_vkGetShaderModuleIdentifierEXT;
export using ::PFN_vkGetShaderModuleCreateInfoIdentifierEXT;
@ -8453,6 +8635,9 @@ export using ::PFN_vkGetImageSubresourceLayout2KHR;
//=== VK_AMD_anti_lag ===
export using ::PFN_vkAntiLagUpdateAMD;
//=== VK_KHR_present_wait2 ===
export using ::PFN_vkWaitForPresent2KHR;
//=== VK_EXT_shader_object ===
export using ::PFN_vkCreateShadersEXT;
export using ::PFN_vkDestroyShaderEXT;

View file

@ -57,7 +57,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h
#endif
static_assert( VK_HEADER_VERSION == 316, "Wrong VK_HEADER_VERSION!" );
static_assert( VK_HEADER_VERSION == 317, "Wrong VK_HEADER_VERSION!" );
// <tuple> includes <sys/sysmacros.h> through some other header
// this results in major(x) being resolved to gnu_dev_major(x)
@ -5320,6 +5320,74 @@ VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs, UniqueHandle<Ty
return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode );
}
//=== VK_ARM_tensors ===
// Static-dispatch wrapper: forwards to the global vkCreateTensorARM entry point (VK_ARM_tensors).
VkResult vkCreateTensorARM( VkDevice device, const VkTensorCreateInfoARM * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkTensorARM * pTensor ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateTensorARM( device, pCreateInfo, pAllocator, pTensor );
}
// Static-dispatch wrapper: forwards to the global vkDestroyTensorARM entry point (VK_ARM_tensors).
void vkDestroyTensorARM( VkDevice device, VkTensorARM tensor, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyTensorARM( device, tensor, pAllocator );
}
// Static-dispatch wrapper: forwards to the global vkCreateTensorViewARM entry point (VK_ARM_tensors).
VkResult vkCreateTensorViewARM( VkDevice device, const VkTensorViewCreateInfoARM * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkTensorViewARM * pView ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateTensorViewARM( device, pCreateInfo, pAllocator, pView );
}
// Static-dispatch wrapper: forwards to the global vkDestroyTensorViewARM entry point (VK_ARM_tensors).
void vkDestroyTensorViewARM( VkDevice device, VkTensorViewARM tensorView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyTensorViewARM( device, tensorView, pAllocator );
}
// Static-dispatch wrapper: forwards to the global vkGetTensorMemoryRequirementsARM entry point (VK_ARM_tensors).
void vkGetTensorMemoryRequirementsARM( VkDevice device, const VkTensorMemoryRequirementsInfoARM * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetTensorMemoryRequirementsARM( device, pInfo, pMemoryRequirements );
}
// Static-dispatch wrapper: forwards to the global vkBindTensorMemoryARM entry point (VK_ARM_tensors).
VkResult vkBindTensorMemoryARM( VkDevice device, uint32_t bindInfoCount, const VkBindTensorMemoryInfoARM * pBindInfos ) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindTensorMemoryARM( device, bindInfoCount, pBindInfos );
}
// Static-dispatch wrapper: forwards to the global vkGetDeviceTensorMemoryRequirementsARM entry point (VK_ARM_tensors).
void vkGetDeviceTensorMemoryRequirementsARM( VkDevice device, const VkDeviceTensorMemoryRequirementsARM * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceTensorMemoryRequirementsARM( device, pInfo, pMemoryRequirements );
}
// Static-dispatch wrapper: forwards to the global vkCmdCopyTensorARM entry point (VK_ARM_tensors).
void vkCmdCopyTensorARM( VkCommandBuffer commandBuffer, const VkCopyTensorInfoARM * pCopyTensorInfo ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyTensorARM( commandBuffer, pCopyTensorInfo );
}
// Static-dispatch wrapper: forwards to the global vkGetPhysicalDeviceExternalTensorPropertiesARM entry point (VK_ARM_tensors).
void vkGetPhysicalDeviceExternalTensorPropertiesARM( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalTensorInfoARM * pExternalTensorInfo, VkExternalTensorPropertiesARM * pExternalTensorProperties ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceExternalTensorPropertiesARM( physicalDevice, pExternalTensorInfo, pExternalTensorProperties );
}
// Static-dispatch wrapper: forwards to the global vkGetTensorOpaqueCaptureDescriptorDataARM entry point (VK_ARM_tensors).
VkResult vkGetTensorOpaqueCaptureDescriptorDataARM( VkDevice device, const VkTensorCaptureDescriptorDataInfoARM * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetTensorOpaqueCaptureDescriptorDataARM( device, pInfo, pData );
}
// Static-dispatch wrapper: forwards to the global vkGetTensorViewOpaqueCaptureDescriptorDataARM entry point (VK_ARM_tensors).
VkResult vkGetTensorViewOpaqueCaptureDescriptorDataARM( VkDevice device, const VkTensorViewCaptureDescriptorDataInfoARM * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetTensorViewOpaqueCaptureDescriptorDataARM( device, pInfo, pData );
}
//=== VK_EXT_shader_module_identifier ===
@ -5400,6 +5468,14 @@ VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs, UniqueHandle<Ty
return ::vkAntiLagUpdateAMD( device, pData );
}
//=== VK_KHR_present_wait2 ===
// Static-dispatch wrapper: forwards to the global vkWaitForPresent2KHR entry point (VK_KHR_present_wait2).
VkResult vkWaitForPresent2KHR( VkDevice device, VkSwapchainKHR swapchain, const VkPresentWait2InfoKHR * pPresentWait2Info ) const VULKAN_HPP_NOEXCEPT
{
return ::vkWaitForPresent2KHR( device, swapchain, pPresentWait2Info );
}
//=== VK_EXT_shader_object ===
@ -6713,6 +6789,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_video_decode_av1 ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxVideoAv1ReferencesPerFrameKHR = VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR;
//=== VK_KHR_video_decode_vp9 ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxVideoVp9ReferencesPerFrameKHR = VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR;
//=== VK_NV_partitioned_acceleration_structure ===
VULKAN_HPP_CONSTEXPR_INLINE uint32_t PartitionedAccelerationStructurePartitionIndexGlobalNV = VK_PARTITIONED_ACCELERATION_STRUCTURE_PARTITION_INDEX_GLOBAL_NV;
@ -8171,6 +8250,10 @@ VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubpassMergeFeedbackSpecVersion = VK_EXT_SUB
VULKAN_HPP_CONSTEXPR_INLINE auto LUNARGDirectDriverLoadingExtensionName = VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto LUNARGDirectDriverLoadingSpecVersion = VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION;
//=== VK_ARM_tensors ===
VULKAN_HPP_CONSTEXPR_INLINE auto ARMTensorsExtensionName = VK_ARM_TENSORS_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto ARMTensorsSpecVersion = VK_ARM_TENSORS_SPEC_VERSION;
//=== VK_EXT_shader_module_identifier ===
VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderModuleIdentifierExtensionName = VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderModuleIdentifierSpecVersion = VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION;
@ -8205,6 +8288,14 @@ VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance5SpecVersion = VK_KHR_MAINTENANCE
VULKAN_HPP_CONSTEXPR_INLINE auto AMDAntiLagExtensionName = VK_AMD_ANTI_LAG_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto AMDAntiLagSpecVersion = VK_AMD_ANTI_LAG_SPEC_VERSION;
//=== VK_KHR_present_id2 ===
VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentId2ExtensionName = VK_KHR_PRESENT_ID_2_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentId2SpecVersion = VK_KHR_PRESENT_ID_2_SPEC_VERSION;
//=== VK_KHR_present_wait2 ===
VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentWait2ExtensionName = VK_KHR_PRESENT_WAIT_2_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentWait2SpecVersion = VK_KHR_PRESENT_WAIT_2_SPEC_VERSION;
//=== VK_KHR_ray_tracing_position_fetch ===
VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchExtensionName = VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchSpecVersion = VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION;
@ -8289,6 +8380,10 @@ VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeAv1SpecVersion = VK_KHR_VIDEO_DEC
VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeAv1ExtensionName = VK_KHR_VIDEO_ENCODE_AV1_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeAv1SpecVersion = VK_KHR_VIDEO_ENCODE_AV1_SPEC_VERSION;
//=== VK_KHR_video_decode_vp9 ===
VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeVp9ExtensionName = VK_KHR_VIDEO_DECODE_VP9_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeVp9SpecVersion = VK_KHR_VIDEO_DECODE_VP9_SPEC_VERSION;
//=== VK_KHR_video_maintenance1 ===
VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1ExtensionName = VK_KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1SpecVersion = VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION;
@ -8325,6 +8420,10 @@ VULKAN_HPP_CONSTEXPR_INLINE auto KHRVertexAttributeDivisorSpecVersion = VK_KHR_V
VULKAN_HPP_CONSTEXPR_INLINE auto KHRLoadStoreOpNoneExtensionName = VK_KHR_LOAD_STORE_OP_NONE_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRLoadStoreOpNoneSpecVersion = VK_KHR_LOAD_STORE_OP_NONE_SPEC_VERSION;
//=== VK_KHR_unified_image_layouts ===
VULKAN_HPP_CONSTEXPR_INLINE auto KHRUnifiedImageLayoutsExtensionName = VK_KHR_UNIFIED_IMAGE_LAYOUTS_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRUnifiedImageLayoutsSpecVersion = VK_KHR_UNIFIED_IMAGE_LAYOUTS_SPEC_VERSION;
//=== VK_KHR_shader_float_controls2 ===
VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControls2ExtensionName = VK_KHR_SHADER_FLOAT_CONTROLS_2_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControls2SpecVersion = VK_KHR_SHADER_FLOAT_CONTROLS_2_SPEC_VERSION;
@ -8403,6 +8502,10 @@ VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorSpecVersion = VK_NV_
VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderReplicatedCompositesExtensionName = VK_EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderReplicatedCompositesSpecVersion = VK_EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION;
//=== VK_EXT_shader_float8 ===
VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderFloat8ExtensionName = VK_EXT_SHADER_FLOAT8_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderFloat8SpecVersion = VK_EXT_SHADER_FLOAT8_SPEC_VERSION;
//=== VK_NV_ray_tracing_validation ===
VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationExtensionName = VK_NV_RAY_TRACING_VALIDATION_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationSpecVersion = VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION;
@ -8431,6 +8534,10 @@ VULKAN_HPP_CONSTEXPR_INLINE auto MESAImageAlignmentControlSpecVersion = VK_MESA_
VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlExtensionName = VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlSpecVersion = VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION;
//=== VK_KHR_maintenance9 ===
VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance9ExtensionName = VK_KHR_MAINTENANCE_9_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance9SpecVersion = VK_KHR_MAINTENANCE_9_SPEC_VERSION;
//=== VK_KHR_video_maintenance2 ===
VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance2ExtensionName = VK_KHR_VIDEO_MAINTENANCE_2_EXTENSION_NAME;
VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance2SpecVersion = VK_KHR_VIDEO_MAINTENANCE_2_SPEC_VERSION;
@ -9358,6 +9465,8 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, AccelerationStructureCreateInfoKHR>{ enum { value = true }; };
template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, AccelerationStructureCreateInfoNV>{ enum { value = true }; };
template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, TensorCreateInfoARM>{ enum { value = true }; };
template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, TensorViewCreateInfoARM>{ enum { value = true }; };
//=== VK_EXT_graphics_pipeline_library ===
template <> struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
@ -9670,6 +9779,25 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_LUNARG_direct_driver_loading ===
template <> struct StructExtends<DirectDriverLoadingListLUNARG, InstanceCreateInfo>{ enum { value = true }; };
//=== VK_ARM_tensors ===
template <> struct StructExtends<WriteDescriptorSetTensorARM, WriteDescriptorSet>{ enum { value = true }; };
template <> struct StructExtends<TensorFormatPropertiesARM, FormatProperties2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceTensorPropertiesARM, PhysicalDeviceProperties2>{ enum { value = true }; };
template <> struct StructExtends<TensorMemoryBarrierARM, DependencyInfo>{ enum { value = true }; };
template <> struct StructExtends<TensorDependencyInfoARM, DependencyInfo>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceTensorFeaturesARM, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceTensorFeaturesARM, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<MemoryDedicatedAllocateInfoTensorARM, MemoryAllocateInfo>{ enum { value = true }; };
template <> struct StructExtends<ExternalMemoryTensorCreateInfoARM, TensorCreateInfoARM>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceDescriptorBufferTensorFeaturesARM, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceDescriptorBufferTensorFeaturesARM, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceDescriptorBufferTensorPropertiesARM, PhysicalDeviceProperties2>{ enum { value = true }; };
template <> struct StructExtends<DescriptorGetTensorInfoARM, DescriptorGetInfoEXT>{ enum { value = true }; };
template <> struct StructExtends<FrameBoundaryTensorsARM, SubmitInfo>{ enum { value = true }; };
template <> struct StructExtends<FrameBoundaryTensorsARM, SubmitInfo2>{ enum { value = true }; };
template <> struct StructExtends<FrameBoundaryTensorsARM, PresentInfoKHR>{ enum { value = true }; };
template <> struct StructExtends<FrameBoundaryTensorsARM, BindSparseInfo>{ enum { value = true }; };
//=== VK_EXT_shader_module_identifier ===
template <> struct StructExtends<PhysicalDeviceShaderModuleIdentifierFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceShaderModuleIdentifierFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
@ -9704,6 +9832,17 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<PhysicalDeviceAntiLagFeaturesAMD, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceAntiLagFeaturesAMD, DeviceCreateInfo>{ enum { value = true }; };
//=== VK_KHR_present_id2 ===
template <> struct StructExtends<SurfaceCapabilitiesPresentId2KHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
template <> struct StructExtends<PresentId2KHR, PresentInfoKHR>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDevicePresentId2FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDevicePresentId2FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
//=== VK_KHR_present_wait2 ===
template <> struct StructExtends<SurfaceCapabilitiesPresentWait2KHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDevicePresentWait2FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDevicePresentWait2FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
//=== VK_KHR_ray_tracing_position_fetch ===
template <> struct StructExtends<PhysicalDeviceRayTracingPositionFetchFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceRayTracingPositionFetchFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
@ -9823,6 +9962,14 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<VideoEncodeAV1RateControlInfoKHR, VideoBeginCodingInfoKHR>{ enum { value = true }; };
template <> struct StructExtends<VideoEncodeAV1RateControlLayerInfoKHR, VideoEncodeRateControlLayerInfoKHR>{ enum { value = true }; };
//=== VK_KHR_video_decode_vp9 ===
template <> struct StructExtends<PhysicalDeviceVideoDecodeVP9FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceVideoDecodeVP9FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<VideoDecodeVP9ProfileInfoKHR, VideoProfileInfoKHR>{ enum { value = true }; };
template <> struct StructExtends<VideoDecodeVP9ProfileInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<VideoDecodeVP9CapabilitiesKHR, VideoCapabilitiesKHR>{ enum { value = true }; };
template <> struct StructExtends<VideoDecodeVP9PictureInfoKHR, VideoDecodeInfoKHR>{ enum { value = true }; };
//=== VK_KHR_video_maintenance1 ===
template <> struct StructExtends<PhysicalDeviceVideoMaintenance1FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceVideoMaintenance1FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
@ -9858,6 +10005,11 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
//=== VK_KHR_unified_image_layouts ===
template <> struct StructExtends<PhysicalDeviceUnifiedImageLayoutsFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceUnifiedImageLayoutsFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<AttachmentFeedbackLoopInfoEXT, RenderingAttachmentInfo>{ enum { value = true }; };
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
template <> struct StructExtends<ScreenBufferFormatPropertiesQNX, ScreenBufferPropertiesQNX>{ enum { value = true }; };
@ -9933,6 +10085,10 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<PhysicalDeviceShaderReplicatedCompositesFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceShaderReplicatedCompositesFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
//=== VK_EXT_shader_float8 ===
template <> struct StructExtends<PhysicalDeviceShaderFloat8FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceShaderFloat8FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
//=== VK_NV_ray_tracing_validation ===
template <> struct StructExtends<PhysicalDeviceRayTracingValidationFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceRayTracingValidationFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
@ -9977,6 +10133,12 @@ namespace VULKAN_HPP_NAMESPACE
template <> struct StructExtends<PhysicalDeviceDepthClampControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<PipelineViewportDepthClampControlCreateInfoEXT, PipelineViewportStateCreateInfo>{ enum { value = true }; };
//=== VK_KHR_maintenance9 ===
template <> struct StructExtends<PhysicalDeviceMaintenance9FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceMaintenance9FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceMaintenance9PropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
template <> struct StructExtends<QueueFamilyOwnershipTransferPropertiesKHR, QueueFamilyProperties2>{ enum { value = true }; };
//=== VK_KHR_video_maintenance2 ===
template <> struct StructExtends<PhysicalDeviceVideoMaintenance2FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
template <> struct StructExtends<PhysicalDeviceVideoMaintenance2FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
@ -11264,6 +11426,19 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
//=== VK_ARM_tensors ===
PFN_vkCreateTensorARM vkCreateTensorARM = 0;
PFN_vkDestroyTensorARM vkDestroyTensorARM = 0;
PFN_vkCreateTensorViewARM vkCreateTensorViewARM = 0;
PFN_vkDestroyTensorViewARM vkDestroyTensorViewARM = 0;
PFN_vkGetTensorMemoryRequirementsARM vkGetTensorMemoryRequirementsARM = 0;
PFN_vkBindTensorMemoryARM vkBindTensorMemoryARM = 0;
PFN_vkGetDeviceTensorMemoryRequirementsARM vkGetDeviceTensorMemoryRequirementsARM = 0;
PFN_vkCmdCopyTensorARM vkCmdCopyTensorARM = 0;
PFN_vkGetPhysicalDeviceExternalTensorPropertiesARM vkGetPhysicalDeviceExternalTensorPropertiesARM = 0;
PFN_vkGetTensorOpaqueCaptureDescriptorDataARM vkGetTensorOpaqueCaptureDescriptorDataARM = 0;
PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM vkGetTensorViewOpaqueCaptureDescriptorDataARM = 0;
//=== VK_EXT_shader_module_identifier ===
PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
@ -11284,6 +11459,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_AMD_anti_lag ===
PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0;
//=== VK_KHR_present_wait2 ===
PFN_vkWaitForPresent2KHR vkWaitForPresent2KHR = 0;
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
@ -12552,6 +12730,19 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) );
//=== VK_ARM_tensors ===
vkCreateTensorARM = PFN_vkCreateTensorARM( vkGetInstanceProcAddr( instance, "vkCreateTensorARM" ) );
vkDestroyTensorARM = PFN_vkDestroyTensorARM( vkGetInstanceProcAddr( instance, "vkDestroyTensorARM" ) );
vkCreateTensorViewARM = PFN_vkCreateTensorViewARM( vkGetInstanceProcAddr( instance, "vkCreateTensorViewARM" ) );
vkDestroyTensorViewARM = PFN_vkDestroyTensorViewARM( vkGetInstanceProcAddr( instance, "vkDestroyTensorViewARM" ) );
vkGetTensorMemoryRequirementsARM = PFN_vkGetTensorMemoryRequirementsARM( vkGetInstanceProcAddr( instance, "vkGetTensorMemoryRequirementsARM" ) );
vkBindTensorMemoryARM = PFN_vkBindTensorMemoryARM( vkGetInstanceProcAddr( instance, "vkBindTensorMemoryARM" ) );
vkGetDeviceTensorMemoryRequirementsARM = PFN_vkGetDeviceTensorMemoryRequirementsARM( vkGetInstanceProcAddr( instance, "vkGetDeviceTensorMemoryRequirementsARM" ) );
vkCmdCopyTensorARM = PFN_vkCmdCopyTensorARM( vkGetInstanceProcAddr( instance, "vkCmdCopyTensorARM" ) );
vkGetPhysicalDeviceExternalTensorPropertiesARM = PFN_vkGetPhysicalDeviceExternalTensorPropertiesARM( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalTensorPropertiesARM" ) );
vkGetTensorOpaqueCaptureDescriptorDataARM = PFN_vkGetTensorOpaqueCaptureDescriptorDataARM( vkGetInstanceProcAddr( instance, "vkGetTensorOpaqueCaptureDescriptorDataARM" ) );
vkGetTensorViewOpaqueCaptureDescriptorDataARM = PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM( vkGetInstanceProcAddr( instance, "vkGetTensorViewOpaqueCaptureDescriptorDataARM" ) );
//=== VK_EXT_shader_module_identifier ===
vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) );
vkGetShaderModuleCreateInfoIdentifierEXT = PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
@ -12576,6 +12767,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_AMD_anti_lag ===
vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetInstanceProcAddr( instance, "vkAntiLagUpdateAMD" ) );
//=== VK_KHR_present_wait2 ===
vkWaitForPresent2KHR = PFN_vkWaitForPresent2KHR( vkGetInstanceProcAddr( instance, "vkWaitForPresent2KHR" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) );
@ -13567,6 +13761,18 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
//=== VK_ARM_tensors ===
vkCreateTensorARM = PFN_vkCreateTensorARM( vkGetDeviceProcAddr( device, "vkCreateTensorARM" ) );
vkDestroyTensorARM = PFN_vkDestroyTensorARM( vkGetDeviceProcAddr( device, "vkDestroyTensorARM" ) );
vkCreateTensorViewARM = PFN_vkCreateTensorViewARM( vkGetDeviceProcAddr( device, "vkCreateTensorViewARM" ) );
vkDestroyTensorViewARM = PFN_vkDestroyTensorViewARM( vkGetDeviceProcAddr( device, "vkDestroyTensorViewARM" ) );
vkGetTensorMemoryRequirementsARM = PFN_vkGetTensorMemoryRequirementsARM( vkGetDeviceProcAddr( device, "vkGetTensorMemoryRequirementsARM" ) );
vkBindTensorMemoryARM = PFN_vkBindTensorMemoryARM( vkGetDeviceProcAddr( device, "vkBindTensorMemoryARM" ) );
vkGetDeviceTensorMemoryRequirementsARM = PFN_vkGetDeviceTensorMemoryRequirementsARM( vkGetDeviceProcAddr( device, "vkGetDeviceTensorMemoryRequirementsARM" ) );
vkCmdCopyTensorARM = PFN_vkCmdCopyTensorARM( vkGetDeviceProcAddr( device, "vkCmdCopyTensorARM" ) );
vkGetTensorOpaqueCaptureDescriptorDataARM = PFN_vkGetTensorOpaqueCaptureDescriptorDataARM( vkGetDeviceProcAddr( device, "vkGetTensorOpaqueCaptureDescriptorDataARM" ) );
vkGetTensorViewOpaqueCaptureDescriptorDataARM = PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM( vkGetDeviceProcAddr( device, "vkGetTensorViewOpaqueCaptureDescriptorDataARM" ) );
//=== VK_EXT_shader_module_identifier ===
vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
vkGetShaderModuleCreateInfoIdentifierEXT = PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
@ -13590,6 +13796,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_AMD_anti_lag ===
vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) );
//=== VK_KHR_present_wait2 ===
vkWaitForPresent2KHR = PFN_vkWaitForPresent2KHR( vkGetDeviceProcAddr( device, "vkWaitForPresent2KHR" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );

View file

@ -69,7 +69,7 @@ extern "C" {
#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
// Version of this file
#define VK_HEADER_VERSION 316
#define VK_HEADER_VERSION 317
// Complete version of this file
#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 4, VK_HEADER_VERSION)
@ -1056,6 +1056,30 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT = 1000458003,
VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG = 1000459000,
VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG = 1000459001,
VK_STRUCTURE_TYPE_TENSOR_CREATE_INFO_ARM = 1000460000,
VK_STRUCTURE_TYPE_TENSOR_VIEW_CREATE_INFO_ARM = 1000460001,
VK_STRUCTURE_TYPE_BIND_TENSOR_MEMORY_INFO_ARM = 1000460002,
VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_TENSOR_ARM = 1000460003,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TENSOR_PROPERTIES_ARM = 1000460004,
VK_STRUCTURE_TYPE_TENSOR_FORMAT_PROPERTIES_ARM = 1000460005,
VK_STRUCTURE_TYPE_TENSOR_DESCRIPTION_ARM = 1000460006,
VK_STRUCTURE_TYPE_TENSOR_MEMORY_REQUIREMENTS_INFO_ARM = 1000460007,
VK_STRUCTURE_TYPE_TENSOR_MEMORY_BARRIER_ARM = 1000460008,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TENSOR_FEATURES_ARM = 1000460009,
VK_STRUCTURE_TYPE_DEVICE_TENSOR_MEMORY_REQUIREMENTS_ARM = 1000460010,
VK_STRUCTURE_TYPE_COPY_TENSOR_INFO_ARM = 1000460011,
VK_STRUCTURE_TYPE_TENSOR_COPY_ARM = 1000460012,
VK_STRUCTURE_TYPE_TENSOR_DEPENDENCY_INFO_ARM = 1000460013,
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_TENSOR_ARM = 1000460014,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_TENSOR_INFO_ARM = 1000460015,
VK_STRUCTURE_TYPE_EXTERNAL_TENSOR_PROPERTIES_ARM = 1000460016,
VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_TENSOR_CREATE_INFO_ARM = 1000460017,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_TENSOR_FEATURES_ARM = 1000460018,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_TENSOR_PROPERTIES_ARM = 1000460019,
VK_STRUCTURE_TYPE_DESCRIPTOR_GET_TENSOR_INFO_ARM = 1000460020,
VK_STRUCTURE_TYPE_TENSOR_CAPTURE_DESCRIPTOR_DATA_INFO_ARM = 1000460021,
VK_STRUCTURE_TYPE_TENSOR_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_ARM = 1000460022,
VK_STRUCTURE_TYPE_FRAME_BOUNDARY_TENSORS_ARM = 1000460023,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT = 1000462000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT = 1000462001,
VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT = 1000462002,
@ -1075,6 +1099,12 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD = 1000476000,
VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD = 1000476001,
VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD = 1000476002,
VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_ID_2_KHR = 1000479000,
VK_STRUCTURE_TYPE_PRESENT_ID_2_KHR = 1000479001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_2_FEATURES_KHR = 1000479002,
VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_WAIT_2_KHR = 1000480000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_2_FEATURES_KHR = 1000480001,
VK_STRUCTURE_TYPE_PRESENT_WAIT_2_INFO_KHR = 1000480002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR = 1000481000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT = 1000482000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT = 1000482001,
@ -1143,6 +1173,10 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR = 1000513008,
VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR = 1000513009,
VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR = 1000513010,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_DECODE_VP9_FEATURES_KHR = 1000514000,
VK_STRUCTURE_TYPE_VIDEO_DECODE_VP9_CAPABILITIES_KHR = 1000514001,
VK_STRUCTURE_TYPE_VIDEO_DECODE_VP9_PICTURE_INFO_KHR = 1000514002,
VK_STRUCTURE_TYPE_VIDEO_DECODE_VP9_PROFILE_INFO_KHR = 1000514003,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR = 1000515000,
VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR = 1000515001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV = 1000516000,
@ -1156,6 +1190,8 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM = 1000520001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM = 1000521000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT = 1000524000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFIED_IMAGE_LAYOUTS_FEATURES_KHR = 1000527000,
VK_STRUCTURE_TYPE_ATTACHMENT_FEEDBACK_LOOP_INFO_EXT = 1000527001,
VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX = 1000529000,
VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX = 1000529001,
VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX = 1000529002,
@ -1197,6 +1233,7 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR = 1000562004,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV = 1000563000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT = 1000564000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT8_FEATURES_EXT = 1000567000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV = 1000568000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_FEATURES_NV = 1000569000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_PROPERTIES_NV = 1000569001,
@ -1233,6 +1270,9 @@ typedef enum VkStructureType {
VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA = 1000575002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT = 1000582000,
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT = 1000582001,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_9_FEATURES_KHR = 1000584000,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_9_PROPERTIES_KHR = 1000584001,
VK_STRUCTURE_TYPE_QUEUE_FAMILY_OWNERSHIP_TRANSFER_PROPERTIES_KHR = 1000584002,
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_2_FEATURES_KHR = 1000586000,
VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_INLINE_SESSION_PARAMETERS_INFO_KHR = 1000586001,
VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_INLINE_SESSION_PARAMETERS_INFO_KHR = 1000586002,
@ -1541,6 +1581,7 @@ typedef enum VkImageLayout {
VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001,
VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002,
VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT = 1000339000,
VK_IMAGE_LAYOUT_TENSOR_ALIASING_ARM = 1000460000,
VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR = 1000553000,
VK_IMAGE_LAYOUT_ZERO_INITIALIZED_EXT = 1000620000,
VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
@ -1610,6 +1651,8 @@ typedef enum VkObjectType {
#endif
VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000,
VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000,
VK_OBJECT_TYPE_TENSOR_ARM = 1000460000,
VK_OBJECT_TYPE_TENSOR_VIEW_ARM = 1000460001,
VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000,
VK_OBJECT_TYPE_SHADER_EXT = 1000482000,
VK_OBJECT_TYPE_PIPELINE_BINARY_KHR = 1000483000,
@ -1898,6 +1941,7 @@ typedef enum VkFormat {
VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005,
VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
VK_FORMAT_R8_BOOL_ARM = 1000460000,
VK_FORMAT_R16G16_SFIXED5_NV = 1000464000,
VK_FORMAT_R10X6_UINT_PACK16_ARM = 1000609000,
VK_FORMAT_R10X6G10X6_UINT_2PACK16_ARM = 1000609001,
@ -2351,6 +2395,7 @@ typedef enum VkDescriptorType {
VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM = 1000440000,
VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM = 1000440001,
VK_DESCRIPTOR_TYPE_TENSOR_ARM = 1000460000,
VK_DESCRIPTOR_TYPE_MUTABLE_EXT = 1000351000,
VK_DESCRIPTOR_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_NV = 1000570000,
VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK,
@ -2589,6 +2634,7 @@ typedef enum VkImageUsageFlagBits {
VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000,
VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM = 0x00100000,
VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM = 0x00200000,
VK_IMAGE_USAGE_TENSOR_ALIASING_BIT_ARM = 0x00800000,
VK_IMAGE_USAGE_TILE_MEMORY_BIT_QCOM = 0x08000000,
VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x02000000,
VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x04000000,
@ -2738,6 +2784,11 @@ typedef enum VkQueryPipelineStatisticFlagBits {
VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkQueryPipelineStatisticFlagBits;
typedef VkFlags VkQueryPipelineStatisticFlags;
// Bitmask controlling query pool creation (used in VkQueryPoolCreateInfo::flags).
typedef enum VkQueryPoolCreateFlagBits {
// NOTE(review): presumably allows queries in this pool to be reset from the
// host — the defining extension is not visible in this chunk; confirm against
// the Vulkan registry.
VK_QUERY_POOL_CREATE_RESET_BIT_KHR = 0x00000001,
// Sentinel forcing a 32-bit underlying type; not a valid flag value.
VK_QUERY_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkQueryPoolCreateFlagBits;
// Combination-of-bits type accepted wherever VkQueryPoolCreateFlagBits are OR'd together.
typedef VkFlags VkQueryPoolCreateFlags;
typedef enum VkQueryResultFlagBits {
@ -2862,9 +2913,12 @@ typedef enum VkPipelineCreateFlagBits {
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000,
#endif
// VK_PIPELINE_CREATE_DISPATCH_BASE is a deprecated alias
VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE,
VK_PIPELINE_CREATE_DISPATCH_BASE_BIT_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
// VK_PIPELINE_CREATE_DISPATCH_BASE_KHR is a deprecated alias
VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
// VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias
VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
// VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias
@ -3005,6 +3059,7 @@ typedef enum VkDependencyFlagBits {
VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002,
VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT = 0x00000008,
VK_DEPENDENCY_QUEUE_FAMILY_OWNERSHIP_TRANSFER_USE_ALL_STAGES_BIT_KHR = 0x00000020,
VK_DEPENDENCY_ASYMMETRIC_EVENT_BIT_KHR = 0x00000040,
VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT,
VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT,
VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
@ -6109,12 +6164,14 @@ typedef enum VkResolveModeFlagBits {
VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002,
VK_RESOLVE_MODE_MIN_BIT = 0x00000004,
VK_RESOLVE_MODE_MAX_BIT = 0x00000008,
VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID = 0x00000010,
VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_BIT_ANDROID = 0x00000010,
VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE,
VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT,
VK_RESOLVE_MODE_MIN_BIT_KHR = VK_RESOLVE_MODE_MIN_BIT,
VK_RESOLVE_MODE_MAX_BIT_KHR = VK_RESOLVE_MODE_MAX_BIT,
// VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID is a deprecated alias
VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID = VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_BIT_ANDROID,
VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkResolveModeFlagBits;
typedef VkFlags VkResolveModeFlags;
@ -7071,6 +7128,8 @@ static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM = 0x1000000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM = 0x2000000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TENSOR_SHADER_BIT_ARM = 0x8000000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TENSOR_IMAGE_ALIASING_BIT_ARM = 0x80000000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL;
static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL;
@ -7999,8 +8058,12 @@ static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT
typedef enum VkHostImageCopyFlagBits {
VK_HOST_IMAGE_COPY_MEMCPY = 0x00000001,
VK_HOST_IMAGE_COPY_MEMCPY_EXT = VK_HOST_IMAGE_COPY_MEMCPY,
VK_HOST_IMAGE_COPY_MEMCPY_BIT = 0x00000001,
// VK_HOST_IMAGE_COPY_MEMCPY is a deprecated alias
VK_HOST_IMAGE_COPY_MEMCPY = VK_HOST_IMAGE_COPY_MEMCPY_BIT,
VK_HOST_IMAGE_COPY_MEMCPY_BIT_EXT = VK_HOST_IMAGE_COPY_MEMCPY_BIT,
// VK_HOST_IMAGE_COPY_MEMCPY_EXT is a deprecated alias
VK_HOST_IMAGE_COPY_MEMCPY_EXT = VK_HOST_IMAGE_COPY_MEMCPY_BIT,
VK_HOST_IMAGE_COPY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
} VkHostImageCopyFlagBits;
typedef VkFlags VkHostImageCopyFlags;
@ -8688,6 +8751,8 @@ typedef enum VkSwapchainCreateFlagBitsKHR {
VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002,
VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004,
VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT = 0x00000008,
VK_SWAPCHAIN_CREATE_PRESENT_ID_2_BIT_KHR = 0x00000040,
VK_SWAPCHAIN_CREATE_PRESENT_WAIT_2_BIT_KHR = 0x00000080,
VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkSwapchainCreateFlagBitsKHR;
typedef VkFlags VkSwapchainCreateFlagsKHR;
@ -9017,6 +9082,7 @@ typedef enum VkVideoCodecOperationFlagBitsKHR {
VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR = 0x00000002,
VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR = 0x00000004,
VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR = 0x00040000,
VK_VIDEO_CODEC_OPERATION_DECODE_VP9_BIT_KHR = 0x00000008,
VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkVideoCodecOperationFlagBitsKHR;
typedef VkFlags VkVideoCodecOperationFlagsKHR;
@ -12103,6 +12169,64 @@ VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2KHR(
#endif
// VK_KHR_present_id2 is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_present_id2 1
#define VK_KHR_PRESENT_ID_2_SPEC_VERSION 1
#define VK_KHR_PRESENT_ID_2_EXTENSION_NAME "VK_KHR_present_id2"
// Output structure (chained via pNext of surface-capability queries):
// presentId2Supported reports whether VK_KHR_present_id2 works on the surface.
typedef struct VkSurfaceCapabilitiesPresentId2KHR {
VkStructureType sType;
void* pNext;
VkBool32 presentId2Supported;
} VkSurfaceCapabilitiesPresentId2KHR;
// Input structure supplying per-swapchain present IDs for a present operation;
// pPresentIds is an array of swapchainCount 64-bit IDs (VK_KHR_present_id2).
typedef struct VkPresentId2KHR {
VkStructureType sType;
const void* pNext;
uint32_t swapchainCount;
const uint64_t* pPresentIds;
} VkPresentId2KHR;
// Feature query/enable structure for VK_KHR_present_id2.
typedef struct VkPhysicalDevicePresentId2FeaturesKHR {
VkStructureType sType;
void* pNext;
VkBool32 presentId2;
} VkPhysicalDevicePresentId2FeaturesKHR;
// VK_KHR_present_wait2 is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_present_wait2 1
#define VK_KHR_PRESENT_WAIT_2_SPEC_VERSION 1
#define VK_KHR_PRESENT_WAIT_2_EXTENSION_NAME "VK_KHR_present_wait2"
// Output structure: presentWait2Supported reports whether waiting on present
// IDs (VK_KHR_present_wait2) is available for the queried surface.
typedef struct VkSurfaceCapabilitiesPresentWait2KHR {
VkStructureType sType;
void* pNext;
VkBool32 presentWait2Supported;
} VkSurfaceCapabilitiesPresentWait2KHR;
// Feature query/enable structure for VK_KHR_present_wait2.
typedef struct VkPhysicalDevicePresentWait2FeaturesKHR {
VkStructureType sType;
void* pNext;
VkBool32 presentWait2;
} VkPhysicalDevicePresentWait2FeaturesKHR;
// Parameters for vkWaitForPresent2KHR: the present ID to wait for and a
// timeout in nanoseconds (convention for uint64_t timeouts in Vulkan —
// confirm against the VK_KHR_present_wait2 spec).
typedef struct VkPresentWait2InfoKHR {
VkStructureType sType;
const void* pNext;
uint64_t presentId;
uint64_t timeout;
} VkPresentWait2InfoKHR;
// Loader-resolvable function pointer and prototype for waiting until a
// present operation identified by VkPresentWait2InfoKHR has completed.
typedef VkResult (VKAPI_PTR *PFN_vkWaitForPresent2KHR)(VkDevice device, VkSwapchainKHR swapchain, const VkPresentWait2InfoKHR* pPresentWait2Info);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresent2KHR(
VkDevice device,
VkSwapchainKHR swapchain,
const VkPresentWait2InfoKHR* pPresentWait2Info);
#endif
// VK_KHR_ray_tracing_position_fetch is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_ray_tracing_position_fetch 1
#define VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION 1
@ -12259,6 +12383,8 @@ typedef enum VkComponentTypeKHR {
VK_COMPONENT_TYPE_UINT8_PACKED_NV = 1000491001,
VK_COMPONENT_TYPE_FLOAT_E4M3_NV = 1000491002,
VK_COMPONENT_TYPE_FLOAT_E5M2_NV = 1000491003,
VK_COMPONENT_TYPE_FLOAT8_E4M3_EXT = 1000567000,
VK_COMPONENT_TYPE_FLOAT8_E5M2_EXT = 1000567001,
VK_COMPONENT_TYPE_FLOAT16_NV = VK_COMPONENT_TYPE_FLOAT16_KHR,
VK_COMPONENT_TYPE_FLOAT32_NV = VK_COMPONENT_TYPE_FLOAT32_KHR,
VK_COMPONENT_TYPE_FLOAT64_NV = VK_COMPONENT_TYPE_FLOAT64_KHR,
@ -12579,6 +12705,43 @@ typedef struct VkVideoEncodeAV1RateControlLayerInfoKHR {
// VK_KHR_video_decode_vp9 is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_video_decode_vp9 1
#include "vk_video/vulkan_video_codec_vp9std.h"
#include "vk_video/vulkan_video_codec_vp9std_decode.h"
#define VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR 3U
#define VK_KHR_VIDEO_DECODE_VP9_SPEC_VERSION 1
#define VK_KHR_VIDEO_DECODE_VP9_EXTENSION_NAME "VK_KHR_video_decode_vp9"
// Feature query/enable structure for VK_KHR_video_decode_vp9.
typedef struct VkPhysicalDeviceVideoDecodeVP9FeaturesKHR {
VkStructureType sType;
void* pNext;
VkBool32 videoDecodeVP9;
} VkPhysicalDeviceVideoDecodeVP9FeaturesKHR;
// Selects the VP9 profile (StdVideoVP9Profile from the vk_video std headers)
// for a video profile definition.
typedef struct VkVideoDecodeVP9ProfileInfoKHR {
VkStructureType sType;
const void* pNext;
StdVideoVP9Profile stdProfile;
} VkVideoDecodeVP9ProfileInfoKHR;
// Capability query output: maximum VP9 level supported by the implementation.
typedef struct VkVideoDecodeVP9CapabilitiesKHR {
VkStructureType sType;
void* pNext;
StdVideoVP9Level maxLevel;
} VkVideoDecodeVP9CapabilitiesKHR;
// Per-picture decode parameters for VP9. referenceNameSlotIndices maps each of
// the 3 VP9 reference names to a DPB slot index; the three offsets locate the
// uncompressed header, compressed header, and tile data within the bitstream
// buffer (byte offsets — confirm units against the extension spec).
typedef struct VkVideoDecodeVP9PictureInfoKHR {
VkStructureType sType;
const void* pNext;
const StdVideoDecodeVP9PictureInfo* pStdPictureInfo;
int32_t referenceNameSlotIndices[VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR];
uint32_t uncompressedHeaderOffset;
uint32_t compressedHeaderOffset;
uint32_t tilesOffset;
} VkVideoDecodeVP9PictureInfoKHR;
// VK_KHR_video_maintenance1 is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_video_maintenance1 1
#define VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION 1
@ -12619,6 +12782,25 @@ typedef VkPhysicalDeviceVertexAttributeDivisorFeatures VkPhysicalDeviceVertexAtt
#define VK_KHR_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_KHR_load_store_op_none"
// VK_KHR_unified_image_layouts is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_unified_image_layouts 1
#define VK_KHR_UNIFIED_IMAGE_LAYOUTS_SPEC_VERSION 1
#define VK_KHR_UNIFIED_IMAGE_LAYOUTS_EXTENSION_NAME "VK_KHR_unified_image_layouts"
// Feature query/enable structure for VK_KHR_unified_image_layouts;
// unifiedImageLayoutsVideo extends the feature to video usage.
typedef struct VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR {
VkStructureType sType;
void* pNext;
VkBool32 unifiedImageLayouts;
VkBool32 unifiedImageLayoutsVideo;
} VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
// Opt-in toggle for attachment feedback loops, chained via pNext.
typedef struct VkAttachmentFeedbackLoopInfoEXT {
VkStructureType sType;
const void* pNext;
VkBool32 feedbackLoopEnable;
} VkAttachmentFeedbackLoopInfoEXT;
// VK_KHR_shader_float_controls2 is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_shader_float_controls2 1
#define VK_KHR_SHADER_FLOAT_CONTROLS_2_SPEC_VERSION 1
@ -12939,6 +13121,37 @@ typedef struct VkMemoryBarrierAccessFlags3KHR {
// VK_KHR_maintenance9 is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_maintenance9 1
#define VK_KHR_MAINTENANCE_9_SPEC_VERSION 1
#define VK_KHR_MAINTENANCE_9_EXTENSION_NAME "VK_KHR_maintenance9"
// Value read from unbound vertex-attribute components: (0,0,0,0) or (0,0,0,1)
// (reported in VkPhysicalDeviceMaintenance9PropertiesKHR).
typedef enum VkDefaultVertexAttributeValueKHR {
VK_DEFAULT_VERTEX_ATTRIBUTE_VALUE_ZERO_ZERO_ZERO_ZERO_KHR = 0,
VK_DEFAULT_VERTEX_ATTRIBUTE_VALUE_ZERO_ZERO_ZERO_ONE_KHR = 1,
VK_DEFAULT_VERTEX_ATTRIBUTE_VALUE_MAX_ENUM_KHR = 0x7FFFFFFF
} VkDefaultVertexAttributeValueKHR;
// Feature query/enable structure for VK_KHR_maintenance9.
typedef struct VkPhysicalDeviceMaintenance9FeaturesKHR {
VkStructureType sType;
void* pNext;
VkBool32 maintenance9;
} VkPhysicalDeviceMaintenance9FeaturesKHR;
// Implementation properties for VK_KHR_maintenance9: whether sparse 2D views
// of 3D images are supported, and which default vertex-attribute value the
// implementation uses.
typedef struct VkPhysicalDeviceMaintenance9PropertiesKHR {
VkStructureType sType;
void* pNext;
VkBool32 image2DViewOf3DSparse;
VkDefaultVertexAttributeValueKHR defaultVertexAttributeValue;
} VkPhysicalDeviceMaintenance9PropertiesKHR;
// Per-queue-family properties: optimalImageTransferToQueueFamilies is a
// bitmask of destination queue families (presumably indexed by family —
// confirm against the VK_KHR_maintenance9 spec).
typedef struct VkQueueFamilyOwnershipTransferPropertiesKHR {
VkStructureType sType;
void* pNext;
uint32_t optimalImageTransferToQueueFamilies;
} VkQueueFamilyOwnershipTransferPropertiesKHR;
// VK_KHR_video_maintenance2 is a preprocessor guard. Do not pass it to API calls.
#define VK_KHR_video_maintenance2 1
#define VK_KHR_VIDEO_MAINTENANCE_2_SPEC_VERSION 1
@ -14924,13 +15137,17 @@ typedef enum VkGeometryInstanceFlagBitsKHR {
VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002,
VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004,
VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008,
VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = 0x00000010,
VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000020,
VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_BIT_EXT = 0x00000010,
VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_BIT_EXT = 0x00000020,
VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR,
VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
// VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT is a deprecated alias
VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_BIT_EXT,
// VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT is a deprecated alias
VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_BIT_EXT,
VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkGeometryInstanceFlagBitsKHR;
typedef VkFlags VkGeometryInstanceFlagsKHR;
@ -14946,18 +15163,30 @@ typedef enum VkBuildAccelerationStructureFlagBitsKHR {
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008,
VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010,
VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = 0x00000040,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000080,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = 0x00000100,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_BIT_EXT = 0x00000040,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_BIT_EXT = 0x00000080,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_BIT_EXT = 0x00000100,
#ifdef VK_ENABLE_BETA_EXTENSIONS
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV = 0x00000200,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_BIT_NV = 0x00000200,
#endif
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR = 0x00000800,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_BIT_KHR = 0x00000800,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
// VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT is a deprecated alias
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_BIT_EXT,
// VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT is a deprecated alias
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_BIT_EXT,
// VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT is a deprecated alias
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_BIT_EXT,
#ifdef VK_ENABLE_BETA_EXTENSIONS
// VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV is a deprecated alias
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_BIT_NV,
#endif
// VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR is a deprecated alias
VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_BIT_KHR,
VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
} VkBuildAccelerationStructureFlagBitsKHR;
typedef VkFlags VkBuildAccelerationStructureFlagsKHR;
@ -19596,6 +19825,306 @@ typedef struct VkDirectDriverLoadingListLUNARG {
// VK_ARM_tensors is a preprocessor guard. Do not pass it to API calls.
#define VK_ARM_tensors 1
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkTensorARM)
VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkTensorViewARM)
#define VK_ARM_TENSORS_SPEC_VERSION 1
#define VK_ARM_TENSORS_EXTENSION_NAME "VK_ARM_tensors"
// Memory tiling for tensors: implementation-defined optimal layout or
// row-major-style linear layout (VK_ARM_tensors).
typedef enum VkTensorTilingARM {
VK_TENSOR_TILING_OPTIMAL_ARM = 0,
VK_TENSOR_TILING_LINEAR_ARM = 1,
VK_TENSOR_TILING_MAX_ENUM_ARM = 0x7FFFFFFF
} VkTensorTilingARM;
// 64-bit bitmasks for tensor creation, tensor-view creation, and tensor usage.
// These follow the VkFlags64 pattern: the *FlagBits typedef is an alias of the
// flags type and the individual bits are static const values (64-bit enums are
// not portable in C).
typedef VkFlags64 VkTensorCreateFlagsARM;
// Flag bits for VkTensorCreateFlagBitsARM
typedef VkFlags64 VkTensorCreateFlagBitsARM;
static const VkTensorCreateFlagBitsARM VK_TENSOR_CREATE_MUTABLE_FORMAT_BIT_ARM = 0x00000001ULL;
static const VkTensorCreateFlagBitsARM VK_TENSOR_CREATE_PROTECTED_BIT_ARM = 0x00000002ULL;
static const VkTensorCreateFlagBitsARM VK_TENSOR_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM = 0x00000004ULL;
typedef VkFlags64 VkTensorViewCreateFlagsARM;
// Flag bits for VkTensorViewCreateFlagBitsARM
typedef VkFlags64 VkTensorViewCreateFlagBitsARM;
static const VkTensorViewCreateFlagBitsARM VK_TENSOR_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM = 0x00000001ULL;
typedef VkFlags64 VkTensorUsageFlagsARM;
// Flag bits for VkTensorUsageFlagBitsARM
typedef VkFlags64 VkTensorUsageFlagBitsARM;
static const VkTensorUsageFlagBitsARM VK_TENSOR_USAGE_SHADER_BIT_ARM = 0x00000002ULL;
static const VkTensorUsageFlagBitsARM VK_TENSOR_USAGE_TRANSFER_SRC_BIT_ARM = 0x00000004ULL;
static const VkTensorUsageFlagBitsARM VK_TENSOR_USAGE_TRANSFER_DST_BIT_ARM = 0x00000008ULL;
static const VkTensorUsageFlagBitsARM VK_TENSOR_USAGE_IMAGE_ALIASING_BIT_ARM = 0x00000010ULL;
// Describes a tensor's layout: tiling, element format, and dimensionCount
// entries in pDimensions (and pStrides, when strides are given explicitly),
// plus the intended usage.
typedef struct VkTensorDescriptionARM {
VkStructureType sType;
const void* pNext;
VkTensorTilingARM tiling;
VkFormat format;
uint32_t dimensionCount;
const int64_t* pDimensions;
const int64_t* pStrides;
VkTensorUsageFlagsARM usage;
} VkTensorDescriptionARM;
// Parameters for vkCreateTensorARM; queue-family sharing mirrors the
// buffer/image creation pattern (sharingMode + optional family list).
typedef struct VkTensorCreateInfoARM {
VkStructureType sType;
const void* pNext;
VkTensorCreateFlagsARM flags;
const VkTensorDescriptionARM* pDescription;
VkSharingMode sharingMode;
uint32_t queueFamilyIndexCount;
const uint32_t* pQueueFamilyIndices;
} VkTensorCreateInfoARM;
// Parameters for vkCreateTensorViewARM: the tensor to view and the format to
// interpret it with.
typedef struct VkTensorViewCreateInfoARM {
VkStructureType sType;
const void* pNext;
VkTensorViewCreateFlagsARM flags;
VkTensorARM tensor;
VkFormat format;
} VkTensorViewCreateInfoARM;
// Input to vkGetTensorMemoryRequirementsARM.
typedef struct VkTensorMemoryRequirementsInfoARM {
VkStructureType sType;
const void* pNext;
VkTensorARM tensor;
} VkTensorMemoryRequirementsInfoARM;
// One tensor-to-memory binding for vkBindTensorMemoryARM.
typedef struct VkBindTensorMemoryInfoARM {
VkStructureType sType;
const void* pNext;
VkTensorARM tensor;
VkDeviceMemory memory;
VkDeviceSize memoryOffset;
} VkBindTensorMemoryInfoARM;
// pNext-chained payload for VkWriteDescriptorSet carrying tensorViewCount
// tensor views to write into a descriptor set.
typedef struct VkWriteDescriptorSetTensorARM {
VkStructureType sType;
const void* pNext;
uint32_t tensorViewCount;
const VkTensorViewARM* pTensorViews;
} VkWriteDescriptorSetTensorARM;
// Per-format tensor capabilities, split by optimal vs linear tiling.
typedef struct VkTensorFormatPropertiesARM {
VkStructureType sType;
const void* pNext;
VkFormatFeatureFlags2 optimalTilingTensorFeatures;
VkFormatFeatureFlags2 linearTilingTensorFeatures;
} VkTensorFormatPropertiesARM;
// Implementation limits for VK_ARM_tensors: dimension/element/stride/size
// caps, descriptor-count limits (including update-after-bind variants), and
// which shader stages may access tensors.
typedef struct VkPhysicalDeviceTensorPropertiesARM {
VkStructureType sType;
void* pNext;
uint32_t maxTensorDimensionCount;
uint64_t maxTensorElements;
uint64_t maxPerDimensionTensorElements;
int64_t maxTensorStride;
uint64_t maxTensorSize;
uint32_t maxTensorShaderAccessArrayLength;
uint32_t maxTensorShaderAccessSize;
uint32_t maxDescriptorSetStorageTensors;
uint32_t maxPerStageDescriptorSetStorageTensors;
uint32_t maxDescriptorSetUpdateAfterBindStorageTensors;
uint32_t maxPerStageDescriptorUpdateAfterBindStorageTensors;
VkBool32 shaderStorageTensorArrayNonUniformIndexingNative;
VkShaderStageFlags shaderTensorSupportedStages;
} VkPhysicalDeviceTensorPropertiesARM;
// Tensor analogue of VkBufferMemoryBarrier2: execution/memory dependency plus
// optional queue-family ownership transfer for one tensor.
typedef struct VkTensorMemoryBarrierARM {
VkStructureType sType;
const void* pNext;
VkPipelineStageFlags2 srcStageMask;
VkAccessFlags2 srcAccessMask;
VkPipelineStageFlags2 dstStageMask;
VkAccessFlags2 dstAccessMask;
uint32_t srcQueueFamilyIndex;
uint32_t dstQueueFamilyIndex;
VkTensorARM tensor;
} VkTensorMemoryBarrierARM;
// pNext-chained list of tensor memory barriers for a dependency.
typedef struct VkTensorDependencyInfoARM {
VkStructureType sType;
const void* pNext;
uint32_t tensorMemoryBarrierCount;
const VkTensorMemoryBarrierARM* pTensorMemoryBarriers;
} VkTensorDependencyInfoARM;
// Feature query/enable structure for VK_ARM_tensors: core tensor support plus
// shader-access, dynamic/non-uniform indexing, and update-after-bind toggles.
typedef struct VkPhysicalDeviceTensorFeaturesARM {
VkStructureType sType;
void* pNext;
VkBool32 tensorNonPacked;
VkBool32 shaderTensorAccess;
VkBool32 shaderStorageTensorArrayDynamicIndexing;
VkBool32 shaderStorageTensorArrayNonUniformIndexing;
VkBool32 descriptorBindingStorageTensorUpdateAfterBind;
VkBool32 tensors;
} VkPhysicalDeviceTensorFeaturesARM;
// Input to vkGetDeviceTensorMemoryRequirementsARM — queries requirements from
// create info without creating the tensor.
typedef struct VkDeviceTensorMemoryRequirementsARM {
VkStructureType sType;
const void* pNext;
const VkTensorCreateInfoARM* pCreateInfo;
} VkDeviceTensorMemoryRequirementsARM;
// One copy region: pSrcOffset/pDstOffset/pExtent each point to dimensionCount
// per-dimension values (element units — confirm against the extension spec).
typedef struct VkTensorCopyARM {
VkStructureType sType;
const void* pNext;
uint32_t dimensionCount;
const uint64_t* pSrcOffset;
const uint64_t* pDstOffset;
const uint64_t* pExtent;
} VkTensorCopyARM;
// Parameters for vkCmdCopyTensorARM: source/destination tensors and the
// regions to copy.
typedef struct VkCopyTensorInfoARM {
VkStructureType sType;
const void* pNext;
VkTensorARM srcTensor;
VkTensorARM dstTensor;
uint32_t regionCount;
const VkTensorCopyARM* pRegions;
} VkCopyTensorInfoARM;
// pNext-chained on VkMemoryAllocateInfo to dedicate an allocation to a tensor.
typedef struct VkMemoryDedicatedAllocateInfoTensorARM {
VkStructureType sType;
const void* pNext;
VkTensorARM tensor;
} VkMemoryDedicatedAllocateInfoTensorARM;
// Input to vkGetPhysicalDeviceExternalTensorPropertiesARM: prospective tensor
// description plus the external memory handle type to query.
typedef struct VkPhysicalDeviceExternalTensorInfoARM {
VkStructureType sType;
const void* pNext;
VkTensorCreateFlagsARM flags;
const VkTensorDescriptionARM* pDescription;
VkExternalMemoryHandleTypeFlagBits handleType;
} VkPhysicalDeviceExternalTensorInfoARM;
// Output of vkGetPhysicalDeviceExternalTensorPropertiesARM.
typedef struct VkExternalTensorPropertiesARM {
VkStructureType sType;
const void* pNext;
VkExternalMemoryProperties externalMemoryProperties;
} VkExternalTensorPropertiesARM;
// pNext-chained on VkTensorCreateInfoARM to declare which external memory
// handle types the tensor may be bound to.
typedef struct VkExternalMemoryTensorCreateInfoARM {
VkStructureType sType;
const void* pNext;
VkExternalMemoryHandleTypeFlags handleTypes;
} VkExternalMemoryTensorCreateInfoARM;
// Feature query/enable: tensor descriptors in descriptor buffers.
typedef struct VkPhysicalDeviceDescriptorBufferTensorFeaturesARM {
VkStructureType sType;
void* pNext;
VkBool32 descriptorBufferTensorDescriptors;
} VkPhysicalDeviceDescriptorBufferTensorFeaturesARM;
// Descriptor-buffer sizing properties for tensors: capture/replay data sizes
// and the size of a tensor descriptor.
typedef struct VkPhysicalDeviceDescriptorBufferTensorPropertiesARM {
VkStructureType sType;
const void* pNext;
size_t tensorCaptureReplayDescriptorDataSize;
size_t tensorViewCaptureReplayDescriptorDataSize;
size_t tensorDescriptorSize;
} VkPhysicalDeviceDescriptorBufferTensorPropertiesARM;
// Identifies the tensor view when fetching a tensor descriptor.
typedef struct VkDescriptorGetTensorInfoARM {
VkStructureType sType;
const void* pNext;
VkTensorViewARM tensorView;
} VkDescriptorGetTensorInfoARM;
// Input to vkGetTensorOpaqueCaptureDescriptorDataARM.
typedef struct VkTensorCaptureDescriptorDataInfoARM {
VkStructureType sType;
const void* pNext;
VkTensorARM tensor;
} VkTensorCaptureDescriptorDataInfoARM;
// Input to vkGetTensorViewOpaqueCaptureDescriptorDataARM.
typedef struct VkTensorViewCaptureDescriptorDataInfoARM {
VkStructureType sType;
const void* pNext;
VkTensorViewARM tensorView;
} VkTensorViewCaptureDescriptorDataInfoARM;
// Associates tensorCount tensors with a frame boundary (pNext-chained).
typedef struct VkFrameBoundaryTensorsARM {
VkStructureType sType;
const void* pNext;
uint32_t tensorCount;
const VkTensorARM* pTensors;
} VkFrameBoundaryTensorsARM;
// Command function-pointer typedefs for VK_ARM_tensors (for loader/dynamic
// dispatch), followed by the static prototypes when VK_NO_PROTOTYPES is not
// defined. Create/destroy pairs follow standard Vulkan object lifetime rules
// (pAllocator may be NULL for default allocation callbacks).
typedef VkResult (VKAPI_PTR *PFN_vkCreateTensorARM)(VkDevice device, const VkTensorCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkTensorARM* pTensor);
typedef void (VKAPI_PTR *PFN_vkDestroyTensorARM)(VkDevice device, VkTensorARM tensor, const VkAllocationCallbacks* pAllocator);
typedef VkResult (VKAPI_PTR *PFN_vkCreateTensorViewARM)(VkDevice device, const VkTensorViewCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkTensorViewARM* pView);
typedef void (VKAPI_PTR *PFN_vkDestroyTensorViewARM)(VkDevice device, VkTensorViewARM tensorView, const VkAllocationCallbacks* pAllocator);
typedef void (VKAPI_PTR *PFN_vkGetTensorMemoryRequirementsARM)(VkDevice device, const VkTensorMemoryRequirementsInfoARM* pInfo, VkMemoryRequirements2* pMemoryRequirements);
typedef VkResult (VKAPI_PTR *PFN_vkBindTensorMemoryARM)(VkDevice device, uint32_t bindInfoCount, const VkBindTensorMemoryInfoARM* pBindInfos);
typedef void (VKAPI_PTR *PFN_vkGetDeviceTensorMemoryRequirementsARM)(VkDevice device, const VkDeviceTensorMemoryRequirementsARM* pInfo, VkMemoryRequirements2* pMemoryRequirements);
typedef void (VKAPI_PTR *PFN_vkCmdCopyTensorARM)(VkCommandBuffer commandBuffer, const VkCopyTensorInfoARM* pCopyTensorInfo);
typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalTensorPropertiesARM)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalTensorInfoARM* pExternalTensorInfo, VkExternalTensorPropertiesARM* pExternalTensorProperties);
typedef VkResult (VKAPI_PTR *PFN_vkGetTensorOpaqueCaptureDescriptorDataARM)(VkDevice device, const VkTensorCaptureDescriptorDataInfoARM* pInfo, void* pData);
typedef VkResult (VKAPI_PTR *PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM)(VkDevice device, const VkTensorViewCaptureDescriptorDataInfoARM* pInfo, void* pData);
#ifndef VK_NO_PROTOTYPES
VKAPI_ATTR VkResult VKAPI_CALL vkCreateTensorARM(
VkDevice device,
const VkTensorCreateInfoARM* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkTensorARM* pTensor);
VKAPI_ATTR void VKAPI_CALL vkDestroyTensorARM(
VkDevice device,
VkTensorARM tensor,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR VkResult VKAPI_CALL vkCreateTensorViewARM(
VkDevice device,
const VkTensorViewCreateInfoARM* pCreateInfo,
const VkAllocationCallbacks* pAllocator,
VkTensorViewARM* pView);
VKAPI_ATTR void VKAPI_CALL vkDestroyTensorViewARM(
VkDevice device,
VkTensorViewARM tensorView,
const VkAllocationCallbacks* pAllocator);
VKAPI_ATTR void VKAPI_CALL vkGetTensorMemoryRequirementsARM(
VkDevice device,
const VkTensorMemoryRequirementsInfoARM* pInfo,
VkMemoryRequirements2* pMemoryRequirements);
VKAPI_ATTR VkResult VKAPI_CALL vkBindTensorMemoryARM(
VkDevice device,
uint32_t bindInfoCount,
const VkBindTensorMemoryInfoARM* pBindInfos);
VKAPI_ATTR void VKAPI_CALL vkGetDeviceTensorMemoryRequirementsARM(
VkDevice device,
const VkDeviceTensorMemoryRequirementsARM* pInfo,
VkMemoryRequirements2* pMemoryRequirements);
VKAPI_ATTR void VKAPI_CALL vkCmdCopyTensorARM(
VkCommandBuffer commandBuffer,
const VkCopyTensorInfoARM* pCopyTensorInfo);
VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalTensorPropertiesARM(
VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalTensorInfoARM* pExternalTensorInfo,
VkExternalTensorPropertiesARM* pExternalTensorProperties);
VKAPI_ATTR VkResult VKAPI_CALL vkGetTensorOpaqueCaptureDescriptorDataARM(
VkDevice device,
const VkTensorCaptureDescriptorDataInfoARM* pInfo,
void* pData);
VKAPI_ATTR VkResult VKAPI_CALL vkGetTensorViewOpaqueCaptureDescriptorDataARM(
VkDevice device,
const VkTensorViewCaptureDescriptorDataInfoARM* pInfo,
void* pData);
#endif
// VK_EXT_shader_module_identifier is a preprocessor guard. Do not pass it to API calls.
#define VK_EXT_shader_module_identifier 1
#define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U
@ -20753,6 +21282,19 @@ typedef struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT {
// VK_EXT_shader_float8 is a preprocessor guard. Do not pass it to API calls.
#define VK_EXT_shader_float8 1
#define VK_EXT_SHADER_FLOAT8_SPEC_VERSION 1
#define VK_EXT_SHADER_FLOAT8_EXTENSION_NAME "VK_EXT_shader_float8"
// Feature query/enable structure for VK_EXT_shader_float8: 8-bit float shader
// support and its use in cooperative matrices.
typedef struct VkPhysicalDeviceShaderFloat8FeaturesEXT {
VkStructureType sType;
void* pNext;
VkBool32 shaderFloat8;
VkBool32 shaderFloat8CooperativeMatrix;
} VkPhysicalDeviceShaderFloat8FeaturesEXT;
// VK_NV_ray_tracing_validation is a preprocessor guard. Do not pass it to API calls.
#define VK_NV_ray_tracing_validation 1
#define VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION 1

View file

@ -1403,6 +1403,30 @@ namespace VULKAN_HPP_NAMESPACE
eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT,
eDirectDriverLoadingInfoLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG,
eDirectDriverLoadingListLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG,
eTensorCreateInfoARM = VK_STRUCTURE_TYPE_TENSOR_CREATE_INFO_ARM,
eTensorViewCreateInfoARM = VK_STRUCTURE_TYPE_TENSOR_VIEW_CREATE_INFO_ARM,
eBindTensorMemoryInfoARM = VK_STRUCTURE_TYPE_BIND_TENSOR_MEMORY_INFO_ARM,
eWriteDescriptorSetTensorARM = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_TENSOR_ARM,
ePhysicalDeviceTensorPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TENSOR_PROPERTIES_ARM,
eTensorFormatPropertiesARM = VK_STRUCTURE_TYPE_TENSOR_FORMAT_PROPERTIES_ARM,
eTensorDescriptionARM = VK_STRUCTURE_TYPE_TENSOR_DESCRIPTION_ARM,
eTensorMemoryRequirementsInfoARM = VK_STRUCTURE_TYPE_TENSOR_MEMORY_REQUIREMENTS_INFO_ARM,
eTensorMemoryBarrierARM = VK_STRUCTURE_TYPE_TENSOR_MEMORY_BARRIER_ARM,
ePhysicalDeviceTensorFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TENSOR_FEATURES_ARM,
eDeviceTensorMemoryRequirementsARM = VK_STRUCTURE_TYPE_DEVICE_TENSOR_MEMORY_REQUIREMENTS_ARM,
eCopyTensorInfoARM = VK_STRUCTURE_TYPE_COPY_TENSOR_INFO_ARM,
eTensorCopyARM = VK_STRUCTURE_TYPE_TENSOR_COPY_ARM,
eTensorDependencyInfoARM = VK_STRUCTURE_TYPE_TENSOR_DEPENDENCY_INFO_ARM,
eMemoryDedicatedAllocateInfoTensorARM = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_TENSOR_ARM,
ePhysicalDeviceExternalTensorInfoARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_TENSOR_INFO_ARM,
eExternalTensorPropertiesARM = VK_STRUCTURE_TYPE_EXTERNAL_TENSOR_PROPERTIES_ARM,
eExternalMemoryTensorCreateInfoARM = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_TENSOR_CREATE_INFO_ARM,
ePhysicalDeviceDescriptorBufferTensorFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_TENSOR_FEATURES_ARM,
ePhysicalDeviceDescriptorBufferTensorPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_TENSOR_PROPERTIES_ARM,
eDescriptorGetTensorInfoARM = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_TENSOR_INFO_ARM,
eTensorCaptureDescriptorDataInfoARM = VK_STRUCTURE_TYPE_TENSOR_CAPTURE_DESCRIPTOR_DATA_INFO_ARM,
eTensorViewCaptureDescriptorDataInfoARM = VK_STRUCTURE_TYPE_TENSOR_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_ARM,
eFrameBoundaryTensorsARM = VK_STRUCTURE_TYPE_FRAME_BOUNDARY_TENSORS_ARM,
ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT,
ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT,
ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT,
@ -1425,6 +1449,12 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceAntiLagFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD,
eAntiLagDataAMD = VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD,
eAntiLagPresentationInfoAMD = VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD,
eSurfaceCapabilitiesPresentId2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_ID_2_KHR,
ePresentId2KHR = VK_STRUCTURE_TYPE_PRESENT_ID_2_KHR,
ePhysicalDevicePresentId2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_2_FEATURES_KHR,
eSurfaceCapabilitiesPresentWait2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_WAIT_2_KHR,
ePhysicalDevicePresentWait2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_2_FEATURES_KHR,
ePresentWait2InfoKHR = VK_STRUCTURE_TYPE_PRESENT_WAIT_2_INFO_KHR,
ePhysicalDeviceRayTracingPositionFetchFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR,
ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT,
ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT,
@ -1496,6 +1526,10 @@ namespace VULKAN_HPP_NAMESPACE
eVideoEncodeAv1QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR,
eVideoEncodeAv1SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR,
eVideoEncodeAv1GopRemainingFrameInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR,
ePhysicalDeviceVideoDecodeVp9FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_DECODE_VP9_FEATURES_KHR,
eVideoDecodeVp9CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_VP9_CAPABILITIES_KHR,
eVideoDecodeVp9PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_VP9_PICTURE_INFO_KHR,
eVideoDecodeVp9ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_VP9_PROFILE_INFO_KHR,
ePhysicalDeviceVideoMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR,
eVideoInlineQueryInfoKHR = VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR,
ePhysicalDevicePerStageDescriptorSetFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV,
@ -1509,6 +1543,8 @@ namespace VULKAN_HPP_NAMESPACE
eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM,
ePhysicalDeviceCubicClampFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM,
ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT,
ePhysicalDeviceUnifiedImageLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFIED_IMAGE_LAYOUTS_FEATURES_KHR,
eAttachmentFeedbackLoopInfoEXT = VK_STRUCTURE_TYPE_ATTACHMENT_FEEDBACK_LOOP_INFO_EXT,
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
eScreenBufferPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX,
eScreenBufferFormatPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX,
@ -1553,6 +1589,7 @@ namespace VULKAN_HPP_NAMESPACE
ePhysicalDeviceLayeredApiVulkanPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR,
ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV,
ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT,
ePhysicalDeviceShaderFloat8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT8_FEATURES_EXT,
ePhysicalDeviceRayTracingValidationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV,
ePhysicalDeviceClusterAccelerationStructureFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_FEATURES_NV,
ePhysicalDeviceClusterAccelerationStructurePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_ACCELERATION_STRUCTURE_PROPERTIES_NV,
@ -1589,6 +1626,9 @@ namespace VULKAN_HPP_NAMESPACE
eImageAlignmentControlCreateInfoMESA = VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA,
ePhysicalDeviceDepthClampControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT,
ePipelineViewportDepthClampControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT,
ePhysicalDeviceMaintenance9FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_9_FEATURES_KHR,
ePhysicalDeviceMaintenance9PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_9_PROPERTIES_KHR,
eQueueFamilyOwnershipTransferPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_OWNERSHIP_TRANSFER_PROPERTIES_KHR,
ePhysicalDeviceVideoMaintenance2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_2_FEATURES_KHR,
eVideoDecodeH264InlineSessionParametersInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_INLINE_SESSION_PARAMETERS_INFO_KHR,
eVideoDecodeH265InlineSessionParametersInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_INLINE_SESSION_PARAMETERS_INFO_KHR,
@ -1693,6 +1733,8 @@ namespace VULKAN_HPP_NAMESPACE
eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT,
eTensorARM = VK_OBJECT_TYPE_TENSOR_ARM,
eTensorViewARM = VK_OBJECT_TYPE_TENSOR_VIEW_ARM,
eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV,
eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT,
ePipelineBinaryKHR = VK_OBJECT_TYPE_PIPELINE_BINARY_KHR,
@ -2024,6 +2066,7 @@ namespace VULKAN_HPP_NAMESPACE
ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
eR8BoolARM = VK_FORMAT_R8_BOOL_ARM,
eR16G16Sfixed5NV = VK_FORMAT_R16G16_SFIXED5_NV,
eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV,
eR10X6UintPack16ARM = VK_FORMAT_R10X6_UINT_PACK16_ARM,
@ -2241,6 +2284,7 @@ namespace VULKAN_HPP_NAMESPACE
eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI,
eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM,
eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM,
eTensorAliasingARM = VK_IMAGE_USAGE_TENSOR_ALIASING_BIT_ARM,
eTileMemoryQCOM = VK_IMAGE_USAGE_TILE_MEMORY_BIT_QCOM,
eVideoEncodeQuantizationDeltaMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR,
eVideoEncodeEmphasisMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR
@ -2278,6 +2322,7 @@ namespace VULKAN_HPP_NAMESPACE
| ImageUsageFlagBits::eInvocationMaskHUAWEI
| ImageUsageFlagBits::eSampleWeightQCOM
| ImageUsageFlagBits::eSampleBlockMatchQCOM
| ImageUsageFlagBits::eTensorAliasingARM
| ImageUsageFlagBits::eTileMemoryQCOM
| ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR
| ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR;
@ -2773,6 +2818,25 @@ namespace VULKAN_HPP_NAMESPACE
| QueryPipelineStatisticFlagBits::eClusterCullingShaderInvocationsHUAWEI;
};
// wrapper class for enum VkQueryPoolCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolCreateFlagBits.html
enum class QueryPoolCreateFlagBits : VkQueryPoolCreateFlags{
eResetKHR = VK_QUERY_POOL_CREATE_RESET_BIT_KHR
};
// wrapper using for bitmask VkQueryPoolCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolCreateFlags.html
using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
template <> struct FlagTraits<QueryPoolCreateFlagBits>
{
using WrappedType = VkQueryPoolCreateFlagBits;
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPoolCreateFlags allFlags =
QueryPoolCreateFlagBits::eResetKHR;
};
// wrapper class for enum VkQueryResultFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryResultFlagBits.html
enum class QueryResultFlagBits : VkQueryResultFlags{
e64 = VK_QUERY_RESULT_64_BIT,
@ -2823,22 +2887,6 @@ namespace VULKAN_HPP_NAMESPACE
enum class QueryPoolCreateFlagBits : VkQueryPoolCreateFlags{};
// wrapper using for bitmask VkQueryPoolCreateFlags, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkQueryPoolCreateFlags.html
using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
template <> struct FlagTraits<QueryPoolCreateFlagBits>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPoolCreateFlags allFlags = {};
};
// wrapper class for enum VkBufferCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCreateFlagBits.html
enum class BufferCreateFlagBits : VkBufferCreateFlags{
eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
@ -3015,6 +3063,7 @@ namespace VULKAN_HPP_NAMESPACE
eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR,
eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR,
eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT,
eTensorAliasingARM = VK_IMAGE_LAYOUT_TENSOR_ALIASING_ARM,
eVideoEncodeQuantizationMapKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR,
eZeroInitializedEXT = VK_IMAGE_LAYOUT_ZERO_INITIALIZED_EXT
};
@ -3388,7 +3437,7 @@ namespace VULKAN_HPP_NAMESPACE
eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT_KHR,
eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT,
eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT,
@ -3932,6 +3981,7 @@ namespace VULKAN_HPP_NAMESPACE
eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV,
eSampleWeightImageQCOM = VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM,
eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM,
eTensorARM = VK_DESCRIPTOR_TYPE_TENSOR_ARM,
eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE,
ePartitionedAccelerationStructureNV = VK_DESCRIPTOR_TYPE_PARTITIONED_ACCELERATION_STRUCTURE_NV
@ -4087,7 +4137,8 @@ namespace VULKAN_HPP_NAMESPACE
eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR,
eFeedbackLoopEXT = VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT,
eQueueFamilyOwnershipTransferUseAllStagesKHR = VK_DEPENDENCY_QUEUE_FAMILY_OWNERSHIP_TRANSFER_USE_ALL_STAGES_BIT_KHR
eQueueFamilyOwnershipTransferUseAllStagesKHR = VK_DEPENDENCY_QUEUE_FAMILY_OWNERSHIP_TRANSFER_USE_ALL_STAGES_BIT_KHR,
eAsymmetricEventKHR = VK_DEPENDENCY_ASYMMETRIC_EVENT_BIT_KHR
};
@ -4105,7 +4156,8 @@ namespace VULKAN_HPP_NAMESPACE
| DependencyFlagBits::eDeviceGroup
| DependencyFlagBits::eViewLocal
| DependencyFlagBits::eFeedbackLoopEXT
| DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR;
| DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR
| DependencyFlagBits::eAsymmetricEventKHR;
};
// wrapper class for enum VkFramebufferCreateFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferCreateFlagBits.html
@ -4916,7 +4968,7 @@ namespace VULKAN_HPP_NAMESPACE
eMin = VK_RESOLVE_MODE_MIN_BIT,
eMax = VK_RESOLVE_MODE_MAX_BIT,
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
eExternalFormatDownsampleANDROID = VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID
eExternalFormatDownsampleANDROID = VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_BIT_ANDROID
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
};
@ -5386,6 +5438,8 @@ namespace VULKAN_HPP_NAMESPACE
eWeightSampledImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM,
eBlockMatchingQCOM = VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM,
eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM,
eTensorShaderARM = VK_FORMAT_FEATURE_2_TENSOR_SHADER_BIT_ARM,
eTensorImageAliasingARM = VK_FORMAT_FEATURE_2_TENSOR_IMAGE_ALIASING_BIT_ARM,
eOpticalFlowImageNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV,
eOpticalFlowVectorNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV,
eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV,
@ -5447,6 +5501,8 @@ namespace VULKAN_HPP_NAMESPACE
| FormatFeatureFlagBits2::eWeightSampledImageQCOM
| FormatFeatureFlagBits2::eBlockMatchingQCOM
| FormatFeatureFlagBits2::eBoxFilterSampledQCOM
| FormatFeatureFlagBits2::eTensorShaderARM
| FormatFeatureFlagBits2::eTensorImageAliasingARM
| FormatFeatureFlagBits2::eOpticalFlowImageNV
| FormatFeatureFlagBits2::eOpticalFlowVectorNV
| FormatFeatureFlagBits2::eOpticalFlowCostNV
@ -5713,7 +5769,7 @@ namespace VULKAN_HPP_NAMESPACE
// wrapper class for enum VkHostImageCopyFlagBits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageCopyFlagBits.html
enum class HostImageCopyFlagBits : VkHostImageCopyFlags{
eMemcpy = VK_HOST_IMAGE_COPY_MEMCPY
eMemcpy = VK_HOST_IMAGE_COPY_MEMCPY_BIT
};
using HostImageCopyFlagBitsEXT = HostImageCopyFlagBits;
@ -5838,7 +5894,9 @@ namespace VULKAN_HPP_NAMESPACE
eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR,
eDeferredMemoryAllocationEXT = VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT
eDeferredMemoryAllocationEXT = VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT,
ePresentId2 = VK_SWAPCHAIN_CREATE_PRESENT_ID_2_BIT_KHR,
ePresentWait2 = VK_SWAPCHAIN_CREATE_PRESENT_WAIT_2_BIT_KHR
};
@ -5855,7 +5913,9 @@ namespace VULKAN_HPP_NAMESPACE
SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions
| SwapchainCreateFlagBitsKHR::eProtected
| SwapchainCreateFlagBitsKHR::eMutableFormat
| SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT;
| SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT
| SwapchainCreateFlagBitsKHR::ePresentId2
| SwapchainCreateFlagBitsKHR::ePresentWait2;
};
// wrapper class for enum VkDeviceGroupPresentModeFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentModeFlagBitsKHR.html
@ -6146,7 +6206,8 @@ namespace VULKAN_HPP_NAMESPACE
eDecodeH264 = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR,
eDecodeH265 = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR,
eDecodeAv1 = VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR,
eEncodeAv1 = VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR
eEncodeAv1 = VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR,
eDecodeVp9 = VK_VIDEO_CODEC_OPERATION_DECODE_VP9_BIT_KHR
};
@ -6166,7 +6227,8 @@ namespace VULKAN_HPP_NAMESPACE
| VideoCodecOperationFlagBitsKHR::eDecodeH264
| VideoCodecOperationFlagBitsKHR::eDecodeH265
| VideoCodecOperationFlagBitsKHR::eDecodeAv1
| VideoCodecOperationFlagBitsKHR::eEncodeAv1;
| VideoCodecOperationFlagBitsKHR::eEncodeAv1
| VideoCodecOperationFlagBitsKHR::eDecodeVp9;
};
// wrapper class for enum VkVideoChromaSubsamplingFlagBitsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkVideoChromaSubsamplingFlagBitsKHR.html
@ -7326,8 +7388,8 @@ namespace VULKAN_HPP_NAMESPACE
eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
eForceOpacityMicromap2StateEXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT,
eDisableOpacityMicromapsEXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT
eForceOpacityMicromap2StateEXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_BIT_EXT,
eDisableOpacityMicromapsEXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_BIT_EXT
};
using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;
@ -7359,13 +7421,14 @@ namespace VULKAN_HPP_NAMESPACE
ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV,
eAllowOpacityMicromapUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT,
eAllowDisableOpacityMicromapsEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT,
eAllowOpacityMicromapDataUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT,
eAllowOpacityMicromapUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_BIT_EXT,
eAllowDisableOpacityMicromapsEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_BIT_EXT,
eAllowOpacityMicromapDataUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_BIT_EXT,
#if defined( VK_ENABLE_BETA_EXTENSIONS )
eAllowDisplacementMicromapUpdateNV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV,
eAllowDisplacementMicromapUpdateNV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_BIT_NV,
eAllowDisplacementMicromapUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
eAllowDataAccess = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR
eAllowDataAccess = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_BIT_KHR
};
using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;
@ -8793,6 +8856,83 @@ namespace VULKAN_HPP_NAMESPACE
static VULKAN_HPP_CONST_OR_CONSTEXPR DirectDriverLoadingFlagsLUNARG allFlags = {};
};
//=== VK_ARM_tensors ===
// wrapper class for enum VkTensorCreateFlagBitsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCreateFlagBitsARM.html
enum class TensorCreateFlagBitsARM : VkTensorCreateFlagsARM{
eMutableFormat = VK_TENSOR_CREATE_MUTABLE_FORMAT_BIT_ARM,
eProtected = VK_TENSOR_CREATE_PROTECTED_BIT_ARM,
eDescriptorBufferCaptureReplay = VK_TENSOR_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM
};
// wrapper using for bitmask VkTensorCreateFlagsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCreateFlagsARM.html
using TensorCreateFlagsARM = Flags<TensorCreateFlagBitsARM>;
template <> struct FlagTraits<TensorCreateFlagBitsARM>
{
using WrappedType = VkTensorCreateFlagBitsARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR TensorCreateFlagsARM allFlags =
TensorCreateFlagBitsARM::eMutableFormat
| TensorCreateFlagBitsARM::eProtected
| TensorCreateFlagBitsARM::eDescriptorBufferCaptureReplay;
};
// wrapper class for enum VkTensorViewCreateFlagBitsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateFlagBitsARM.html
enum class TensorViewCreateFlagBitsARM : VkTensorViewCreateFlagsARM{
eDescriptorBufferCaptureReplay = VK_TENSOR_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_ARM
};
// wrapper using for bitmask VkTensorViewCreateFlagsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewCreateFlagsARM.html
using TensorViewCreateFlagsARM = Flags<TensorViewCreateFlagBitsARM>;
template <> struct FlagTraits<TensorViewCreateFlagBitsARM>
{
using WrappedType = VkTensorViewCreateFlagBitsARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR TensorViewCreateFlagsARM allFlags =
TensorViewCreateFlagBitsARM::eDescriptorBufferCaptureReplay;
};
// wrapper class for enum VkTensorUsageFlagBitsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorUsageFlagBitsARM.html
enum class TensorUsageFlagBitsARM : VkTensorUsageFlagsARM{
eShader = VK_TENSOR_USAGE_SHADER_BIT_ARM,
eTransferSrc = VK_TENSOR_USAGE_TRANSFER_SRC_BIT_ARM,
eTransferDst = VK_TENSOR_USAGE_TRANSFER_DST_BIT_ARM,
eImageAliasing = VK_TENSOR_USAGE_IMAGE_ALIASING_BIT_ARM
};
// wrapper using for bitmask VkTensorUsageFlagsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorUsageFlagsARM.html
using TensorUsageFlagsARM = Flags<TensorUsageFlagBitsARM>;
template <> struct FlagTraits<TensorUsageFlagBitsARM>
{
using WrappedType = VkTensorUsageFlagBitsARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
static VULKAN_HPP_CONST_OR_CONSTEXPR TensorUsageFlagsARM allFlags =
TensorUsageFlagBitsARM::eShader
| TensorUsageFlagBitsARM::eTransferSrc
| TensorUsageFlagBitsARM::eTransferDst
| TensorUsageFlagBitsARM::eImageAliasing;
};
// wrapper class for enum VkTensorTilingARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorTilingARM.html
enum class TensorTilingARM{
eOptimal = VK_TENSOR_TILING_OPTIMAL_ARM,
eLinear = VK_TENSOR_TILING_LINEAR_ARM
};
//=== VK_NV_optical_flow ===
// wrapper class for enum VkOpticalFlowUsageFlagBitsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowUsageFlagBitsNV.html
@ -9023,7 +9163,9 @@ namespace VULKAN_HPP_NAMESPACE
eSint8PackedNV = VK_COMPONENT_TYPE_SINT8_PACKED_NV,
eUint8PackedNV = VK_COMPONENT_TYPE_UINT8_PACKED_NV,
eFloatE4M3NV = VK_COMPONENT_TYPE_FLOAT_E4M3_NV,
eFloatE5M2NV = VK_COMPONENT_TYPE_FLOAT_E5M2_NV
eFloatE5M2NV = VK_COMPONENT_TYPE_FLOAT_E5M2_NV,
eFloat8E4M3EXT = VK_COMPONENT_TYPE_FLOAT8_E4M3_EXT,
eFloat8E5M2EXT = VK_COMPONENT_TYPE_FLOAT8_E5M2_EXT
};
using ComponentTypeNV = ComponentTypeKHR;
@ -9611,6 +9753,16 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_KHR_maintenance9 ===
// wrapper class for enum VkDefaultVertexAttributeValueKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDefaultVertexAttributeValueKHR.html
enum class DefaultVertexAttributeValueKHR{
eZeroZeroZeroZero = VK_DEFAULT_VERTEX_ATTRIBUTE_VALUE_ZERO_ZERO_ZERO_ZERO_KHR,
eZeroZeroZeroOne = VK_DEFAULT_VERTEX_ATTRIBUTE_VALUE_ZERO_ZERO_ZERO_ONE_KHR
};
//===========================================================
@ -9712,6 +9864,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_opacity_micromap ===
case VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT : return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
//=== VK_ARM_tensors ===
case VULKAN_HPP_NAMESPACE::ObjectType::eTensorARM : return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
case VULKAN_HPP_NAMESPACE::ObjectType::eTensorViewARM : return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
//=== VK_NV_optical_flow ===
case VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV : return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;

View file

@ -403,6 +403,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_EXT_external_memory_acquire_unmodified",
"VK_EXT_extended_dynamic_state3",
"VK_EXT_subpass_merge_feedback",
"VK_ARM_tensors",
"VK_EXT_shader_module_identifier",
"VK_EXT_rasterization_order_attachment_access",
"VK_NV_optical_flow",
@ -413,6 +414,8 @@ namespace VULKAN_HPP_NAMESPACE
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
"VK_KHR_maintenance5",
"VK_AMD_anti_lag",
"VK_KHR_present_id2",
"VK_KHR_present_wait2",
"VK_KHR_ray_tracing_position_fetch",
"VK_EXT_shader_object",
"VK_KHR_pipeline_binary",
@ -433,6 +436,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_KHR_compute_shader_derivatives",
"VK_KHR_video_decode_av1",
"VK_KHR_video_encode_av1",
"VK_KHR_video_decode_vp9",
"VK_KHR_video_maintenance1",
"VK_NV_per_stage_descriptor_set",
"VK_QCOM_image_processing2",
@ -442,6 +446,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_EXT_attachment_feedback_loop_dynamic_state",
"VK_KHR_vertex_attribute_divisor",
"VK_KHR_load_store_op_none",
"VK_KHR_unified_image_layouts",
"VK_KHR_shader_float_controls2",
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
"VK_QNX_external_memory_screen_buffer",
@ -462,6 +467,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_KHR_maintenance7",
"VK_NV_shader_atomic_float16_vector",
"VK_EXT_shader_replicated_composites",
"VK_EXT_shader_float8",
"VK_NV_ray_tracing_validation",
"VK_NV_cluster_acceleration_structure",
"VK_NV_partitioned_acceleration_structure",
@ -469,6 +475,7 @@ namespace VULKAN_HPP_NAMESPACE
"VK_KHR_maintenance8",
"VK_MESA_image_alignment_control",
"VK_EXT_depth_clamp_control",
"VK_KHR_maintenance9",
"VK_KHR_video_maintenance2",
"VK_HUAWEI_hdr_vivid",
"VK_NV_cooperative_matrix2",
@ -870,6 +877,7 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_EXT_external_memory_acquire_unmodified", { { "VK_VERSION_1_0", { { "VK_KHR_external_memory", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_EXT_extended_dynamic_state3", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_EXT_subpass_merge_feedback", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_ARM_tensors", { { "VK_VERSION_1_3", { { } } } } },
{ "VK_EXT_shader_module_identifier", { { "VK_VERSION_1_0", { { "VK_EXT_pipeline_creation_cache_control", "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { "VK_EXT_pipeline_creation_cache_control", } } }, { "VK_VERSION_1_3", { { } } } } },
{ "VK_EXT_rasterization_order_attachment_access", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_NV_optical_flow", { { "VK_VERSION_1_0", { { "VK_KHR_format_feature_flags2", "VK_KHR_get_physical_device_properties2", "VK_KHR_synchronization2", } } }, { "VK_VERSION_1_1", { { "VK_KHR_format_feature_flags2", "VK_KHR_synchronization2", } } }, { "VK_VERSION_1_3", { { } } } } },
@ -879,6 +887,8 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_ANDROID_external_format_resolve", { { "VK_VERSION_1_0", { { "VK_ANDROID_external_memory_android_hardware_buffer", } } } } },
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
{ "VK_KHR_maintenance5", { { "VK_VERSION_1_1", { { "VK_KHR_dynamic_rendering", } } }, { "VK_VERSION_1_3", { { } } } } },
{ "VK_KHR_present_id2", { { "VK_VERSION_1_0", { { "VK_KHR_get_surface_capabilities2", "VK_KHR_surface", "VK_KHR_swapchain", } } } } },
{ "VK_KHR_present_wait2", { { "VK_VERSION_1_0", { { "VK_KHR_get_surface_capabilities2", "VK_KHR_present_id2", "VK_KHR_surface", "VK_KHR_swapchain", } } } } },
{ "VK_KHR_ray_tracing_position_fetch", { { "VK_VERSION_1_0", { { "VK_KHR_acceleration_structure", } } } } },
{ "VK_EXT_shader_object", { { "VK_VERSION_1_0", { { "VK_KHR_dynamic_rendering", "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { "VK_KHR_dynamic_rendering", } } }, { "VK_VERSION_1_3", { { } } } } },
{ "VK_KHR_pipeline_binary", { { "VK_VERSION_1_0", { { "VK_KHR_maintenance5", } } }, { "VK_VERSION_1_4", { { } } } } },
@ -891,11 +901,12 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_ARM_shader_core_builtins", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_EXT_pipeline_library_group_handles", { { "VK_VERSION_1_0", { { "VK_KHR_pipeline_library", "VK_KHR_ray_tracing_pipeline", } } } } },
{ "VK_EXT_dynamic_rendering_unused_attachments", { { "VK_VERSION_1_0", { { "VK_KHR_dynamic_rendering", "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { "VK_KHR_dynamic_rendering", } } }, { "VK_VERSION_1_3", { { } } } } },
{ "VK_NV_low_latency2", { { "VK_VERSION_1_0", { { "VK_KHR_timeline_semaphore", } } }, { "VK_VERSION_1_2", { { } } } } },
{ "VK_NV_low_latency2", { { "VK_VERSION_1_0", { { "VK_KHR_present_id", "VK_KHR_timeline_semaphore", }, { "VK_KHR_present_id2", "VK_KHR_timeline_semaphore", } } }, { "VK_VERSION_1_2", { { "VK_KHR_present_id", }, { "VK_KHR_present_id2", } } } } },
{ "VK_KHR_cooperative_matrix", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_KHR_compute_shader_derivatives", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_KHR_video_decode_av1", { { "VK_VERSION_1_0", { { "VK_KHR_video_decode_queue", } } } } },
{ "VK_KHR_video_encode_av1", { { "VK_VERSION_1_0", { { "VK_KHR_video_encode_queue", } } } } },
{ "VK_KHR_video_decode_vp9", { { "VK_VERSION_1_0", { { "VK_KHR_video_decode_queue", } } } } },
{ "VK_KHR_video_maintenance1", { { "VK_VERSION_1_0", { { "VK_KHR_video_queue", } } } } },
{ "VK_NV_per_stage_descriptor_set", { { "VK_VERSION_1_0", { { "VK_KHR_maintenance6", } } }, { "VK_VERSION_1_4", { { } } } } },
{ "VK_QCOM_image_processing2", { { "VK_VERSION_1_0", { { "VK_QCOM_image_processing", } } } } },
@ -924,6 +935,7 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_KHR_maintenance8", { { "VK_VERSION_1_1", { { } } } } },
{ "VK_MESA_image_alignment_control", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_EXT_depth_clamp_control", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_KHR_maintenance9", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { { } } } } },
{ "VK_KHR_video_maintenance2", { { "VK_VERSION_1_0", { { "VK_KHR_video_queue", } } } } },
{ "VK_HUAWEI_hdr_vivid", { { "VK_VERSION_1_0", { { "VK_EXT_hdr_metadata", "VK_KHR_get_physical_device_properties2", "VK_KHR_swapchain", } } }, { "VK_VERSION_1_1", { { "VK_EXT_hdr_metadata", "VK_KHR_swapchain", } } } } },
{ "VK_NV_cooperative_matrix2", { { "VK_VERSION_1_0", { { "VK_KHR_cooperative_matrix", } } } } },
@ -1083,7 +1095,8 @@ namespace VULKAN_HPP_NAMESPACE
{ "VK_KHR_index_type_uint8", "VK_VERSION_1_4"},
{ "VK_KHR_line_rasterization", "VK_VERSION_1_4"},
{ "VK_KHR_shader_expect_assume", "VK_VERSION_1_4"},
{ "VK_KHR_maintenance6", "VK_VERSION_1_4"} };
{ "VK_KHR_maintenance6", "VK_VERSION_1_4"},
{ "VK_EXT_vertex_attribute_robustness", "VK_KHR_maintenance9"} };
return promotedExtensions;
}
@ -1237,6 +1250,7 @@ namespace VULKAN_HPP_NAMESPACE
if ( extension == "VK_KHR_line_rasterization" ) { return "VK_VERSION_1_4"; }
if ( extension == "VK_KHR_shader_expect_assume" ) { return "VK_VERSION_1_4"; }
if ( extension == "VK_KHR_maintenance6" ) { return "VK_VERSION_1_4"; }
if ( extension == "VK_EXT_vertex_attribute_robustness" ) { return "VK_KHR_maintenance9"; }
return "";
}
@ -1598,6 +1612,7 @@ false;
|| ( extension == "VK_EXT_external_memory_acquire_unmodified" )
|| ( extension == "VK_EXT_extended_dynamic_state3" )
|| ( extension == "VK_EXT_subpass_merge_feedback" )
|| ( extension == "VK_ARM_tensors" )
|| ( extension == "VK_EXT_shader_module_identifier" )
|| ( extension == "VK_EXT_rasterization_order_attachment_access" )
|| ( extension == "VK_NV_optical_flow" )
@ -1608,6 +1623,8 @@ false;
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
|| ( extension == "VK_KHR_maintenance5" )
|| ( extension == "VK_AMD_anti_lag" )
|| ( extension == "VK_KHR_present_id2" )
|| ( extension == "VK_KHR_present_wait2" )
|| ( extension == "VK_KHR_ray_tracing_position_fetch" )
|| ( extension == "VK_EXT_shader_object" )
|| ( extension == "VK_KHR_pipeline_binary" )
@ -1628,6 +1645,7 @@ false;
|| ( extension == "VK_KHR_compute_shader_derivatives" )
|| ( extension == "VK_KHR_video_decode_av1" )
|| ( extension == "VK_KHR_video_encode_av1" )
|| ( extension == "VK_KHR_video_decode_vp9" )
|| ( extension == "VK_KHR_video_maintenance1" )
|| ( extension == "VK_NV_per_stage_descriptor_set" )
|| ( extension == "VK_QCOM_image_processing2" )
@ -1637,6 +1655,7 @@ false;
|| ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" )
|| ( extension == "VK_KHR_vertex_attribute_divisor" )
|| ( extension == "VK_KHR_load_store_op_none" )
|| ( extension == "VK_KHR_unified_image_layouts" )
|| ( extension == "VK_KHR_shader_float_controls2" )
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
|| ( extension == "VK_QNX_external_memory_screen_buffer" )
@ -1657,6 +1676,7 @@ false;
|| ( extension == "VK_KHR_maintenance7" )
|| ( extension == "VK_NV_shader_atomic_float16_vector" )
|| ( extension == "VK_EXT_shader_replicated_composites" )
|| ( extension == "VK_EXT_shader_float8" )
|| ( extension == "VK_NV_ray_tracing_validation" )
|| ( extension == "VK_NV_cluster_acceleration_structure" )
|| ( extension == "VK_NV_partitioned_acceleration_structure" )
@ -1664,6 +1684,7 @@ false;
|| ( extension == "VK_KHR_maintenance8" )
|| ( extension == "VK_MESA_image_alignment_control" )
|| ( extension == "VK_EXT_depth_clamp_control" )
|| ( extension == "VK_KHR_maintenance9" )
|| ( extension == "VK_KHR_video_maintenance2" )
|| ( extension == "VK_HUAWEI_hdr_vivid" )
|| ( extension == "VK_NV_cooperative_matrix2" )
@ -1869,7 +1890,8 @@ false;
( extension == "VK_KHR_index_type_uint8" ) ||
( extension == "VK_KHR_line_rasterization" ) ||
( extension == "VK_KHR_shader_expect_assume" ) ||
( extension == "VK_KHR_maintenance6" );
( extension == "VK_KHR_maintenance6" ) ||
( extension == "VK_EXT_vertex_attribute_robustness" );
}
} // namespace VULKAN_HPP_NAMESPACE

File diff suppressed because one or more lines are too long

View file

@ -22973,6 +22973,446 @@ VULKAN_HPP_ASSERT( d.vkCmdSetCoverageModulationTableNV && "Function <vkCmdSetCov
d.vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
}
//=== VK_ARM_tensors ===
// wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createTensorARM( const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::TensorARM * pTensor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateTensorARM( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkTensorCreateInfoARM *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkTensorARM *>( pTensor ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::TensorARM>::type Device::createTensorARM( const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
VULKAN_HPP_ASSERT( d.vkCreateTensorARM && "Function <vkCreateTensorARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::TensorARM tensor;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateTensorARM( m_device, reinterpret_cast<const VkTensorCreateInfoARM *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkTensorARM *>( &tensor ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createTensorARM" );
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( tensor ) );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::TensorARM, Dispatch>>::type Device::createTensorARMUnique( const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
VULKAN_HPP_ASSERT( d.vkCreateTensorARM && "Function <vkCreateTensorARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::TensorARM tensor;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateTensorARM( m_device, reinterpret_cast<const VkTensorCreateInfoARM *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkTensorARM *>( &tensor ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createTensorARMUnique" );
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle<VULKAN_HPP_NAMESPACE::TensorARM, Dispatch>( tensor, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyTensorARM( VULKAN_HPP_NAMESPACE::TensorARM tensor, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyTensorARM( static_cast<VkDevice>( m_device ), static_cast<VkTensorARM>( tensor ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyTensorARM( VULKAN_HPP_NAMESPACE::TensorARM tensor, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
VULKAN_HPP_ASSERT( d.vkDestroyTensorARM && "Function <vkDestroyTensorARM> requires <VK_ARM_tensors>" );
#endif
d.vkDestroyTensorARM( m_device, static_cast<VkTensorARM>( tensor ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::TensorARM tensor, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyTensorARM( static_cast<VkDevice>( m_device ), static_cast<VkTensorARM>( tensor ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html
// Generic destroy overload selected by handle type (e.g. used by ObjectDestroy-based UniqueHandle deleters).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::TensorARM tensor, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkDestroyTensorARM && "Function <vkDestroyTensorARM> requires <VK_ARM_tensors>" );
#endif
d.vkDestroyTensorARM( m_device, static_cast<VkTensorARM>( tensor ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createTensorViewARM( const VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::TensorViewARM * pView, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts and forward the raw VkResult back to the caller.
  auto const cCreateInfo = reinterpret_cast<const VkTensorViewCreateInfoARM *>( pCreateInfo );
  auto const cCallbacks  = reinterpret_cast<const VkAllocationCallbacks *>( pAllocator );
  auto const cView       = reinterpret_cast<VkTensorViewARM *>( pView );
  VkResult const vkResult = d.vkCreateTensorViewARM( static_cast<VkDevice>( m_device ), cCreateInfo, cCallbacks, cView );
  return static_cast<Result>( vkResult );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html
// Enhanced-mode overload: returns the created TensorViewARM by value; resultCheck validates the VkResult (throws on failure when exceptions are enabled).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::TensorViewARM>::type Device::createTensorViewARM( const VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkCreateTensorViewARM && "Function <vkCreateTensorViewARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::TensorViewARM view;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateTensorViewARM( m_device, reinterpret_cast<const VkTensorViewCreateInfoARM *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkTensorViewARM *>( &view ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createTensorViewARM" );
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html
// Smart-handle overload: wraps the created view in a UniqueHandle whose ObjectDestroy deleter destroys it on this Device with the same allocator.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::TensorViewARM, Dispatch>>::type Device::createTensorViewARMUnique( const VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkCreateTensorViewARM && "Function <vkCreateTensorViewARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::TensorViewARM view;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkCreateTensorViewARM( m_device, reinterpret_cast<const VkTensorViewCreateInfoARM *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkTensorViewARM *>( &view ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createTensorViewARMUnique" );
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle<VULKAN_HPP_NAMESPACE::TensorViewARM, Dispatch>( view, detail::ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyTensorViewARM( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts, then forward to the dispatched entry point.
  auto const cTensorView = static_cast<VkTensorViewARM>( tensorView );
  auto const cCallbacks  = reinterpret_cast<const VkAllocationCallbacks *>( pAllocator );
  d.vkDestroyTensorViewARM( static_cast<VkDevice>( m_device ), cTensorView, cCallbacks );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html
// Enhanced-mode overload: takes an Optional allocator; a disengaged Optional yields a null VkAllocationCallbacks pointer.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyTensorViewARM( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkDestroyTensorViewARM && "Function <vkDestroyTensorViewARM> requires <VK_ARM_tensors>" );
#endif
d.vkDestroyTensorViewARM( m_device, static_cast<VkTensorViewARM>( tensorView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Generic destroy overload selected by handle type; lowers the wrappers to C types and dispatches.
  auto const cTensorView = static_cast<VkTensorViewARM>( tensorView );
  auto const cCallbacks  = reinterpret_cast<const VkAllocationCallbacks *>( pAllocator );
  d.vkDestroyTensorViewARM( static_cast<VkDevice>( m_device ), cTensorView, cCallbacks );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html
// Generic destroy overload selected by handle type (e.g. used by ObjectDestroy-based UniqueHandle deleters).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkDestroyTensorViewARM && "Function <vkDestroyTensorViewARM> requires <VK_ARM_tensors>" );
#endif
d.vkDestroyTensorViewARM( m_device, static_cast<VkTensorViewARM>( tensorView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts, then forward to the dispatched entry point.
  auto const cInfo         = reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( pInfo );
  auto const cRequirements = reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements );
  d.vkGetTensorMemoryRequirementsARM( static_cast<VkDevice>( m_device ), cInfo, cRequirements );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
// Enhanced-mode overload: returns the MemoryRequirements2 structure by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkGetTensorMemoryRequirementsARM && "Function <vkGetTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
d.vkGetTensorMemoryRequirementsARM( m_device, reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
// StructureChain overload: fills the MemoryRequirements2 element of a caller-specified chain, so pNext-extension structures can be queried in the same call.
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkGetTensorMemoryRequirementsARM && "Function <vkGetTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
d.vkGetTensorMemoryRequirementsARM( m_device, reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkBindTensorMemoryARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindTensorMemoryARM.html
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindTensorMemoryARM( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts and forward the raw VkResult back to the caller.
  auto const cBindInfos = reinterpret_cast<const VkBindTensorMemoryInfoARM *>( pBindInfos );
  VkResult const vkResult = d.vkBindTensorMemoryARM( static_cast<VkDevice>( m_device ), bindInfoCount, cBindInfos );
  return static_cast<Result>( vkResult );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkBindTensorMemoryARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindTensorMemoryARM.html
// Enhanced-mode overload: takes the bind infos as an ArrayProxy (count derived from its size); resultCheck validates the VkResult.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindTensorMemoryARM( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM> const & bindInfos, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkBindTensorMemoryARM && "Function <vkBindTensorMemoryARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkBindTensorMemoryARM( m_device, bindInfos.size(), reinterpret_cast<const VkBindTensorMemoryInfoARM *>( bindInfos.data() ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindTensorMemoryARM" );
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts, then forward to the dispatched entry point.
  auto const cInfo         = reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( pInfo );
  auto const cRequirements = reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements );
  d.vkGetDeviceTensorMemoryRequirementsARM( static_cast<VkDevice>( m_device ), cInfo, cRequirements );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
// Enhanced-mode overload (DeviceTensorMemoryRequirementsARM variant): returns the MemoryRequirements2 structure by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkGetDeviceTensorMemoryRequirementsARM && "Function <vkGetDeviceTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
d.vkGetDeviceTensorMemoryRequirementsARM( m_device, reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
// StructureChain overload: fills the MemoryRequirements2 element of a caller-specified chain, so pNext-extension structures can be queried in the same call.
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkGetDeviceTensorMemoryRequirementsARM && "Function <vkGetDeviceTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
d.vkGetDeviceTensorMemoryRequirementsARM( m_device, reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkCmdCopyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyTensorARM.html
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyTensorARM( const VULKAN_HPP_NAMESPACE::CopyTensorInfoARM * pCopyTensorInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts, then record the copy into this command buffer.
  auto const cCopyInfo = reinterpret_cast<const VkCopyTensorInfoARM *>( pCopyTensorInfo );
  d.vkCmdCopyTensorARM( static_cast<VkCommandBuffer>( m_commandBuffer ), cCopyInfo );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkCmdCopyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyTensorARM.html
// Enhanced-mode overload: takes the copy info by reference instead of by pointer.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyTensorARM( const VULKAN_HPP_NAMESPACE::CopyTensorInfoARM & copyTensorInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkCmdCopyTensorARM && "Function <vkCmdCopyTensorARM> requires <VK_ARM_tensors>" );
#endif
d.vkCmdCopyTensorARM( m_commandBuffer, reinterpret_cast<const VkCopyTensorInfoARM *>( &copyTensorInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkGetPhysicalDeviceExternalTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalTensorPropertiesARM.html
template <typename Dispatch>
VULKAN_HPP_INLINE void PhysicalDevice::getExternalTensorPropertiesARM( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM * pExternalTensorInfo, VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM * pExternalTensorProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts, then forward to the dispatched entry point.
  auto const cInfo       = reinterpret_cast<const VkPhysicalDeviceExternalTensorInfoARM *>( pExternalTensorInfo );
  auto const cProperties = reinterpret_cast<VkExternalTensorPropertiesARM *>( pExternalTensorProperties );
  d.vkGetPhysicalDeviceExternalTensorPropertiesARM( static_cast<VkPhysicalDevice>( m_physicalDevice ), cInfo, cProperties );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetPhysicalDeviceExternalTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalTensorPropertiesARM.html
// Enhanced-mode overload: returns the ExternalTensorPropertiesARM structure by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM PhysicalDevice::getExternalTensorPropertiesARM( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM & externalTensorInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceExternalTensorPropertiesARM && "Function <vkGetPhysicalDeviceExternalTensorPropertiesARM> requires <VK_ARM_tensors>" );
#endif
VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM externalTensorProperties;
d.vkGetPhysicalDeviceExternalTensorPropertiesARM( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalTensorInfoARM *>( &externalTensorInfo ), reinterpret_cast<VkExternalTensorPropertiesARM *>( &externalTensorProperties ) );
return externalTensorProperties;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkGetTensorOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDescriptorDataARM.html
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getTensorOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts and forward the raw VkResult back to the caller.
  auto const cInfo = reinterpret_cast<const VkTensorCaptureDescriptorDataInfoARM *>( pInfo );
  VkResult const vkResult = d.vkGetTensorOpaqueCaptureDescriptorDataARM( static_cast<VkDevice>( m_device ), cInfo, pData );
  return static_cast<Result>( vkResult );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetTensorOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDescriptorDataARM.html
// Enhanced-mode overload: the caller chooses DataType to receive the opaque capture data.
// NOTE(review): DataType's size presumably must match the driver-reported capture-data size — not checkable here; see the extension spec.
template <typename DataType, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getTensorOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM & info, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkGetTensorOpaqueCaptureDescriptorDataARM && "Function <vkGetTensorOpaqueCaptureDescriptorDataARM> requires <VK_ARM_tensors>" );
#endif
DataType data;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetTensorOpaqueCaptureDescriptorDataARM( m_device, reinterpret_cast<const VkTensorCaptureDescriptorDataInfoARM *>( &info ), &data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getTensorOpaqueCaptureDescriptorDataARM" );
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkGetTensorViewOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorViewOpaqueCaptureDescriptorDataARM.html
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getTensorViewOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts and forward the raw VkResult back to the caller.
  auto const cInfo = reinterpret_cast<const VkTensorViewCaptureDescriptorDataInfoARM *>( pInfo );
  VkResult const vkResult = d.vkGetTensorViewOpaqueCaptureDescriptorDataARM( static_cast<VkDevice>( m_device ), cInfo, pData );
  return static_cast<Result>( vkResult );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetTensorViewOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorViewOpaqueCaptureDescriptorDataARM.html
// Enhanced-mode overload: the caller chooses DataType to receive the opaque capture data.
// NOTE(review): DataType's size presumably must match the driver-reported capture-data size — not checkable here; see the extension spec.
template <typename DataType, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getTensorViewOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM & info, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_ARM_tensors was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkGetTensorViewOpaqueCaptureDescriptorDataARM && "Function <vkGetTensorViewOpaqueCaptureDescriptorDataARM> requires <VK_ARM_tensors>" );
#endif
DataType data;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetTensorViewOpaqueCaptureDescriptorDataARM( m_device, reinterpret_cast<const VkTensorViewCaptureDescriptorDataInfoARM *>( &info ), &data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getTensorViewOpaqueCaptureDescriptorDataARM" );
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_EXT_shader_module_identifier ===
@ -23423,6 +23863,36 @@ VULKAN_HPP_ASSERT( d.vkAntiLagUpdateAMD && "Function <vkAntiLagUpdateAMD> requir
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_KHR_present_wait2 ===
// wrapper function for command vkWaitForPresent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresent2KHR.html
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresent2KHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR * pPresentWait2Info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Lower the C++ wrapper types to their C counterparts and forward the raw VkResult back to the caller.
  auto const cSwapchain = static_cast<VkSwapchainKHR>( swapchain );
  auto const cWaitInfo  = reinterpret_cast<const VkPresentWait2InfoKHR *>( pPresentWait2Info );
  VkResult const vkResult = d.vkWaitForPresent2KHR( static_cast<VkDevice>( m_device ), cSwapchain, cWaitInfo );
  return static_cast<Result>( vkResult );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkWaitForPresent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresent2KHR.html
// Enhanced-mode overload: eSuccess, eTimeout and eSuboptimalKHR are accepted as non-error results and returned to the caller.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForPresent2KHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR & presentWait2Info, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
// With the dynamic dispatch loader, the function pointer is null unless VK_KHR_present_wait2 was enabled/loaded.
VULKAN_HPP_ASSERT( d.vkWaitForPresent2KHR && "Function <vkWaitForPresent2KHR> requires <VK_KHR_present_wait2>" );
#endif
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkWaitForPresent2KHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkPresentWait2InfoKHR *>( &presentWait2Info ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresent2KHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

View file

@ -1697,6 +1697,32 @@ namespace VULKAN_HPP_NAMESPACE
struct DirectDriverLoadingInfoLUNARG;
struct DirectDriverLoadingListLUNARG;
//=== VK_ARM_tensors ===
struct TensorDescriptionARM;
struct TensorCreateInfoARM;
struct TensorViewCreateInfoARM;
struct TensorMemoryRequirementsInfoARM;
struct BindTensorMemoryInfoARM;
struct WriteDescriptorSetTensorARM;
struct TensorFormatPropertiesARM;
struct PhysicalDeviceTensorPropertiesARM;
struct TensorMemoryBarrierARM;
struct TensorDependencyInfoARM;
struct PhysicalDeviceTensorFeaturesARM;
struct DeviceTensorMemoryRequirementsARM;
struct CopyTensorInfoARM;
struct TensorCopyARM;
struct MemoryDedicatedAllocateInfoTensorARM;
struct PhysicalDeviceExternalTensorInfoARM;
struct ExternalTensorPropertiesARM;
struct ExternalMemoryTensorCreateInfoARM;
struct PhysicalDeviceDescriptorBufferTensorFeaturesARM;
struct PhysicalDeviceDescriptorBufferTensorPropertiesARM;
struct DescriptorGetTensorInfoARM;
struct TensorCaptureDescriptorDataInfoARM;
struct TensorViewCaptureDescriptorDataInfoARM;
struct FrameBoundaryTensorsARM;
//=== VK_EXT_shader_module_identifier ===
struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
@ -1731,6 +1757,16 @@ namespace VULKAN_HPP_NAMESPACE
struct AntiLagDataAMD;
struct AntiLagPresentationInfoAMD;
//=== VK_KHR_present_id2 ===
struct SurfaceCapabilitiesPresentId2KHR;
struct PresentId2KHR;
struct PhysicalDevicePresentId2FeaturesKHR;
//=== VK_KHR_present_wait2 ===
struct SurfaceCapabilitiesPresentWait2KHR;
struct PhysicalDevicePresentWait2FeaturesKHR;
struct PresentWait2InfoKHR;
//=== VK_KHR_ray_tracing_position_fetch ===
struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
@ -1851,6 +1887,12 @@ namespace VULKAN_HPP_NAMESPACE
struct VideoEncodeAV1RateControlInfoKHR;
struct VideoEncodeAV1RateControlLayerInfoKHR;
//=== VK_KHR_video_decode_vp9 ===
struct PhysicalDeviceVideoDecodeVP9FeaturesKHR;
struct VideoDecodeVP9ProfileInfoKHR;
struct VideoDecodeVP9CapabilitiesKHR;
struct VideoDecodeVP9PictureInfoKHR;
//=== VK_KHR_video_maintenance1 ===
struct PhysicalDeviceVideoMaintenance1FeaturesKHR;
struct VideoInlineQueryInfoKHR;
@ -1878,6 +1920,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
//=== VK_KHR_unified_image_layouts ===
struct PhysicalDeviceUnifiedImageLayoutsFeaturesKHR;
struct AttachmentFeedbackLoopInfoEXT;
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
struct ScreenBufferPropertiesQNX;
@ -1952,6 +1998,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_shader_replicated_composites ===
struct PhysicalDeviceShaderReplicatedCompositesFeaturesEXT;
//=== VK_EXT_shader_float8 ===
struct PhysicalDeviceShaderFloat8FeaturesEXT;
//=== VK_NV_ray_tracing_validation ===
struct PhysicalDeviceRayTracingValidationFeaturesNV;
@ -2024,6 +2073,11 @@ namespace VULKAN_HPP_NAMESPACE
struct PipelineViewportDepthClampControlCreateInfoEXT;
struct DepthClampRangeEXT;
//=== VK_KHR_maintenance9 ===
struct PhysicalDeviceMaintenance9FeaturesKHR;
struct PhysicalDeviceMaintenance9PropertiesKHR;
struct QueueFamilyOwnershipTransferPropertiesKHR;
//=== VK_KHR_video_maintenance2 ===
struct PhysicalDeviceVideoMaintenance2FeaturesKHR;
struct VideoDecodeH264InlineSessionParametersInfoKHR;
@ -2180,6 +2234,10 @@ class BufferCollectionFUCHSIA;
//=== VK_EXT_opacity_micromap ===
class MicromapEXT;
//=== VK_ARM_tensors ===
class TensorARM;
class TensorViewARM;
//=== VK_NV_optical_flow ===
class OpticalFlowSessionNV;
@ -2565,6 +2623,22 @@ class IndirectExecutionSetEXT;
};
using UniqueMicromapEXT = UniqueHandle<MicromapEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
//=== VK_ARM_tensors ===
// UniqueHandle deleter traits: both tensor handle types are destroyed through the owning Device via detail::ObjectDestroy.
template <typename Dispatch>
class UniqueHandleTraits<TensorARM, Dispatch>
{
public:
using deleter = detail::ObjectDestroy<Device, Dispatch>;
};
// Convenience alias using the default dispatcher.
using UniqueTensorARM = UniqueHandle<TensorARM, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
template <typename Dispatch>
class UniqueHandleTraits<TensorViewARM, Dispatch>
{
public:
using deleter = detail::ObjectDestroy<Device, Dispatch>;
};
// Convenience alias using the default dispatcher.
using UniqueTensorViewARM = UniqueHandle<TensorViewARM, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
//=== VK_NV_optical_flow ===
template <typename Dispatch>
class UniqueHandleTraits<OpticalFlowSessionNV, Dispatch>
@ -6849,6 +6923,18 @@ class IndirectExecutionSetEXT;
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_ARM_tensors ===
// wrapper function for command vkCmdCopyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyTensorARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyTensorARM( const VULKAN_HPP_NAMESPACE::CopyTensorInfoARM * pCopyTensorInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkCmdCopyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyTensorARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void copyTensorARM( const VULKAN_HPP_NAMESPACE::CopyTensorInfoARM & copyTensorInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_NV_optical_flow ===
@ -9302,6 +9388,188 @@ class IndirectExecutionSetEXT;
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
// wrapper class for handle VkTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorARM.html
// Non-owning, type-safe wrapper around the raw VkTensorARM handle; destruction is explicit (e.g. Device::destroyTensorARM).
class TensorARM
{
public:
using CType = VkTensorARM;
using NativeType = VkTensorARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eTensorARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
TensorARM() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue
TensorARM( TensorARM const & rhs ) = default;
TensorARM & operator=( TensorARM const & rhs ) = default;
#if !defined(VULKAN_HPP_HANDLES_MOVE_EXCHANGE)
TensorARM( TensorARM && rhs ) = default;
TensorARM & operator=( TensorARM && rhs ) = default;
#else
// Opt-in exchanging moves: the moved-from wrapper is reset to the null handle.
TensorARM( TensorARM && rhs ) VULKAN_HPP_NOEXCEPT
: m_tensorARM( VULKAN_HPP_NAMESPACE::exchange( rhs.m_tensorARM, {} ) )
{}
TensorARM & operator=( TensorARM && rhs ) VULKAN_HPP_NOEXCEPT
{
m_tensorARM = VULKAN_HPP_NAMESPACE::exchange( rhs.m_tensorARM, {} );
return *this;
}
#endif
// Construction from nullptr yields the null handle.
VULKAN_HPP_CONSTEXPR TensorARM( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{}
// Construction from the raw C handle (explicit when typesafe conversion is enabled).
VULKAN_HPP_TYPESAFE_EXPLICIT TensorARM( VkTensorARM tensorARM ) VULKAN_HPP_NOEXCEPT
: m_tensorARM( tensorARM )
{}
#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 )
TensorARM & operator=(VkTensorARM tensorARM) VULKAN_HPP_NOEXCEPT
{
m_tensorARM = tensorARM;
return *this;
}
#endif
// Assigning nullptr resets to the null handle.
TensorARM & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
m_tensorARM = {};
return *this;
}
// Conversion back to the raw C handle.
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkTensorARM() const VULKAN_HPP_NOEXCEPT
{
return m_tensorARM;
}
// True iff the wrapped handle is not VK_NULL_HANDLE.
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return m_tensorARM != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
return m_tensorARM == VK_NULL_HANDLE;
}
private:
VkTensorARM m_tensorARM = {};
};
// Maps ObjectType::eTensorARM back to its C++ handle wrapper type.
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eTensorARM>
{
using Type = VULKAN_HPP_NAMESPACE::TensorARM;
};
#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
// Maps the native VkTensorARM type to its wrapper; only possible when native
// handles are distinct pointer types (64-bit platforms), not uint64_t aliases.
template <>
struct CppType<VkTensorARM, VK_NULL_HANDLE>
{
using Type = VULKAN_HPP_NAMESPACE::TensorARM;
};
#endif
// Marks TensorARM as a Vulkan handle wrapper for generic handle machinery.
template <>
struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::TensorARM>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
// wrapper class for handle VkTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewARM.html
// Non-owning value wrapper around the native VkTensorViewARM handle. Copyable
// and movable; testable against the null handle via operator bool / operator!.
class TensorViewARM
{
public:
// Aliases for the underlying C handle, used by generic Vulkan-Hpp machinery.
using CType = VkTensorViewARM;
using NativeType = VkTensorViewARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eTensorViewARM;
// No dedicated VK_EXT_debug_report enumerant exists for this handle type.
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
TensorViewARM() VULKAN_HPP_NOEXCEPT {} // = default; - try to workaround a compiler issue
TensorViewARM( TensorViewARM const & rhs ) = default;
TensorViewARM & operator=( TensorViewARM const & rhs ) = default;
#if !defined(VULKAN_HPP_HANDLES_MOVE_EXCHANGE)
// Default moves: the moved-from wrapper keeps its handle value (plain copy).
TensorViewARM( TensorViewARM && rhs ) = default;
TensorViewARM & operator=( TensorViewARM && rhs ) = default;
#else
// Exchanging moves: the moved-from wrapper is reset to the null handle.
TensorViewARM( TensorViewARM && rhs ) VULKAN_HPP_NOEXCEPT
: m_tensorViewARM( VULKAN_HPP_NAMESPACE::exchange( rhs.m_tensorViewARM, {} ) )
{}
TensorViewARM & operator=( TensorViewARM && rhs ) VULKAN_HPP_NOEXCEPT
{
m_tensorViewARM = VULKAN_HPP_NAMESPACE::exchange( rhs.m_tensorViewARM, {} );
return *this;
}
#endif
// Constructing/assigning from nullptr yields the null handle.
VULKAN_HPP_CONSTEXPR TensorViewARM( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{}
// Wrap a native handle; explicit when type-safe conversion is enabled.
VULKAN_HPP_TYPESAFE_EXPLICIT TensorViewARM( VkTensorViewARM tensorViewARM ) VULKAN_HPP_NOEXCEPT
: m_tensorViewARM( tensorViewARM )
{}
#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 )
TensorViewARM & operator=(VkTensorViewARM tensorViewARM) VULKAN_HPP_NOEXCEPT
{
m_tensorViewARM = tensorViewARM;
return *this;
}
#endif
TensorViewARM & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
m_tensorViewARM = {};
return *this;
}
// Conversion back to the native handle for use with the C API.
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkTensorViewARM() const VULKAN_HPP_NOEXCEPT
{
return m_tensorViewARM;
}
// True when the wrapper holds a non-null handle.
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return m_tensorViewARM != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
return m_tensorViewARM == VK_NULL_HANDLE;
}
private:
VkTensorViewARM m_tensorViewARM = {};
};
// Maps ObjectType::eTensorViewARM back to its C++ handle wrapper type.
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eTensorViewARM>
{
using Type = VULKAN_HPP_NAMESPACE::TensorViewARM;
};
#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
// Maps the native VkTensorViewARM type to its wrapper; only possible when
// native handles are distinct pointer types (64-bit platforms).
template <>
struct CppType<VkTensorViewARM, VK_NULL_HANDLE>
{
using Type = VULKAN_HPP_NAMESPACE::TensorViewARM;
};
#endif
// Marks TensorViewARM as a Vulkan handle wrapper for generic handle machinery.
template <>
struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::TensorViewARM>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
// wrapper class for handle VkValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCacheEXT.html
class ValidationCacheEXT
{
@ -13795,6 +14063,134 @@ class IndirectExecutionSetEXT;
VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_ARM_tensors ===
// wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result createTensorARM( const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::TensorARM * pTensor, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::TensorARM>::type createTensorARM( const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::TensorARM, Dispatch>>::type createTensorARMUnique( const VULKAN_HPP_NAMESPACE::TensorCreateInfoARM & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyTensorARM( VULKAN_HPP_NAMESPACE::TensorARM tensor, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyTensorARM( VULKAN_HPP_NAMESPACE::TensorARM tensor VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::TensorARM tensor, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkDestroyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::TensorARM tensor, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result createTensorViewARM( const VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::TensorViewARM * pView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<VULKAN_HPP_NAMESPACE::TensorViewARM>::type createTensorViewARM( const VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::TensorViewARM, Dispatch>>::type createTensorViewARMUnique( const VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyTensorViewARM( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyTensorViewARM( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkDestroyTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkDestroyTensorViewARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::TensorViewARM tensorView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkBindTensorMemoryARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindTensorMemoryARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result bindTensorMemoryARM( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM * pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkBindTensorMemoryARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindTensorMemoryARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindTensorMemoryARM( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkGetTensorOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDescriptorDataARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getTensorOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM * pInfo, void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetTensorOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDescriptorDataARM.html
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getTensorOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// wrapper function for command vkGetTensorViewOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorViewOpaqueCaptureDescriptorDataARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getTensorViewOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM * pInfo, void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetTensorViewOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorViewOpaqueCaptureDescriptorDataARM.html
template <typename DataType, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD typename ResultValueType<DataType>::type getTensorViewOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_EXT_shader_module_identifier ===
@ -13915,6 +14311,18 @@ class IndirectExecutionSetEXT;
void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_KHR_present_wait2 ===
// wrapper function for command vkWaitForPresent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresent2KHR.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result waitForPresent2KHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR * pPresentWait2Info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkWaitForPresent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresent2KHR.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresent2KHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR & presentWait2Info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_EXT_shader_object ===
@ -15564,6 +15972,18 @@ class IndirectExecutionSetEXT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_ARM_tensors ===
// wrapper function for command vkGetPhysicalDeviceExternalTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalTensorPropertiesARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getExternalTensorPropertiesARM( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM * pExternalTensorInfo, VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM * pExternalTensorProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// wrapper function for command vkGetPhysicalDeviceExternalTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalTensorPropertiesARM.html
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM getExternalTensorPropertiesARM( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM & externalTensorInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_NV_optical_flow ===

View file

@ -458,6 +458,24 @@ namespace std
}
};
//=== VK_ARM_tensors ===
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorARM>
{
  // Hash the wrapper by delegating to std::hash of the underlying native handle.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorARM const & handle ) const VULKAN_HPP_NOEXCEPT
  {
    return std::hash<VkTensorARM>{}( static_cast<VkTensorARM>( handle ) );
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorViewARM>
{
  // Hash the wrapper by delegating to std::hash of the underlying native handle.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorViewARM const & handle ) const VULKAN_HPP_NOEXCEPT
  {
    return std::hash<VkTensorViewARM>{}( static_cast<VkTensorViewARM>( handle ) );
  }
};
//=== VK_NV_optical_flow ===
template <> struct hash<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>
@ -1112,6 +1130,18 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentFeedbackLoopInfoEXT>
{
  // Field-wise hash: every member (sType and pNext included) is folded into a
  // single seed in declaration order via VULKAN_HPP_HASH_COMBINE.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::AttachmentFeedbackLoopInfoEXT const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.feedbackLoopEnable );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::AttachmentReference>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::AttachmentReference const & attachmentReference) const VULKAN_HPP_NOEXCEPT
@ -1562,6 +1592,20 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM>
{
  // Field-wise hash: every member (sType and pNext included) is folded into a
  // single seed in declaration order via VULKAN_HPP_HASH_COMBINE.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.tensor );
    VULKAN_HPP_HASH_COMBINE( h, v.memory );
    VULKAN_HPP_HASH_COMBINE( h, v.memoryOffset );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT const & bindVertexBufferIndirectCommandEXT) const VULKAN_HPP_NOEXCEPT
@ -2881,6 +2925,36 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorCopyARM>
{
  // Field-wise hash in declaration order. Note: the pointer members
  // (pSrcOffset/pDstOffset/pExtent) contribute their pointer values,
  // not the pointed-to arrays.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorCopyARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.dimensionCount );
    VULKAN_HPP_HASH_COMBINE( h, v.pSrcOffset );
    VULKAN_HPP_HASH_COMBINE( h, v.pDstOffset );
    VULKAN_HPP_HASH_COMBINE( h, v.pExtent );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::CopyTensorInfoARM>
{
  // Field-wise hash in declaration order. Note: pRegions contributes its
  // pointer value, not the pointed-to region array.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyTensorInfoARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.srcTensor );
    VULKAN_HPP_HASH_COMBINE( h, v.dstTensor );
    VULKAN_HPP_HASH_COMBINE( h, v.regionCount );
    VULKAN_HPP_HASH_COMBINE( h, v.pRegions );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & cuFunctionCreateInfoNVX) const VULKAN_HPP_NOEXCEPT
@ -3401,6 +3475,18 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorGetTensorInfoARM>
{
  // Field-wise hash: every member (sType and pNext included) is folded into a
  // single seed in declaration order via VULKAN_HPP_HASH_COMBINE.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::DescriptorGetTensorInfoARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.tensorView );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorPoolSize const & descriptorPoolSize) const VULKAN_HPP_NOEXCEPT
@ -4125,6 +4211,51 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorDescriptionARM>
{
  // Field-wise hash in declaration order. Note: pDimensions/pStrides
  // contribute their pointer values, not the pointed-to arrays.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorDescriptionARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.tiling );
    VULKAN_HPP_HASH_COMBINE( h, v.format );
    VULKAN_HPP_HASH_COMBINE( h, v.dimensionCount );
    VULKAN_HPP_HASH_COMBINE( h, v.pDimensions );
    VULKAN_HPP_HASH_COMBINE( h, v.pStrides );
    VULKAN_HPP_HASH_COMBINE( h, v.usage );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorCreateInfoARM>
{
  // Field-wise hash in declaration order. Note: pDescription and
  // pQueueFamilyIndices contribute their pointer values, not the pointees.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorCreateInfoARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.flags );
    VULKAN_HPP_HASH_COMBINE( h, v.pDescription );
    VULKAN_HPP_HASH_COMBINE( h, v.sharingMode );
    VULKAN_HPP_HASH_COMBINE( h, v.queueFamilyIndexCount );
    VULKAN_HPP_HASH_COMBINE( h, v.pQueueFamilyIndices );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM>
{
  // Field-wise hash in declaration order. Note: pCreateInfo contributes its
  // pointer value, not the pointed-to create info.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.pCreateInfo );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG const & directDriverLoadingInfoLUNARG) const VULKAN_HPP_NOEXCEPT
@ -5035,6 +5166,18 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalFormatQNX>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalMemoryTensorCreateInfoARM>
{
  // Field-wise hash: every member (sType and pNext included) is folded into a
  // single seed in declaration order via VULKAN_HPP_HASH_COMBINE.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalMemoryTensorCreateInfoARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.handleTypes );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const & externalSemaphoreProperties) const VULKAN_HPP_NOEXCEPT
@ -5049,6 +5192,18 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalFormatQNX>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM>
{
  // Field-wise hash: every member (sType and pNext included) is folded into a
  // single seed in declaration order via VULKAN_HPP_HASH_COMBINE.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.externalMemoryProperties );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::FenceCreateInfo>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::FenceCreateInfo const & fenceCreateInfo) const VULKAN_HPP_NOEXCEPT
@ -5173,6 +5328,19 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::FrameBoundaryTensorsARM>
{
  // Field-wise hash in declaration order. Note: pTensors contributes its
  // pointer value, not the pointed-to tensor array.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::FrameBoundaryTensorsARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.tensorCount );
    VULKAN_HPP_HASH_COMBINE( h, v.pTensors );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const & framebufferAttachmentImageInfo) const VULKAN_HPP_NOEXCEPT
@ -6882,6 +7050,18 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoTensorARM>
{
  // Field-wise hash: every member (sType and pNext included) is folded into a
  // single seed in declaration order via VULKAN_HPP_HASH_COMBINE.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoTensorARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.tensor );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const & memoryDedicatedRequirements) const VULKAN_HPP_NOEXCEPT
@ -8406,6 +8586,32 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceCudaKernelLaunchProp
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorFeaturesARM>
{
  // Field-wise hash: every member (sType and pNext included) is folded into a
  // single seed in declaration order via VULKAN_HPP_HASH_COMBINE.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorFeaturesARM const & v ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t h = 0;
    VULKAN_HPP_HASH_COMBINE( h, v.sType );
    VULKAN_HPP_HASH_COMBINE( h, v.pNext );
    VULKAN_HPP_HASH_COMBINE( h, v.descriptorBufferTensorDescriptors );
    return h;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorPropertiesARM>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorPropertiesARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.tensorCaptureReplayDescriptorDataSize );
    VULKAN_HPP_HASH_COMBINE( result, value.tensorViewCaptureReplayDescriptorDataSize );
    VULKAN_HPP_HASH_COMBINE( result, value.tensorDescriptorSize );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const & physicalDeviceDescriptorIndexingFeatures) const VULKAN_HPP_NOEXCEPT
@ -8964,6 +9170,20 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreen
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM>
{
  // Hashes every member in declaration order; note pDescription is hashed as a pointer, not by its pointee.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.flags );
    VULKAN_HPP_HASH_COMBINE( result, value.pDescription );
    VULKAN_HPP_HASH_COMBINE( result, value.handleType );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT const & physicalDeviceFaultFeaturesEXT) const VULKAN_HPP_NOEXCEPT
@ -10094,6 +10314,31 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreen
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.maintenance9 );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9PropertiesKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9PropertiesKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.image2DViewOf3DSparse );
    VULKAN_HPP_HASH_COMBINE( result, value.defaultVertexAttributeValue );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT const & physicalDeviceMapMemoryPlacedFeaturesEXT) const VULKAN_HPP_NOEXCEPT
@ -10809,6 +11054,18 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPro
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentId2FeaturesKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentId2FeaturesKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.presentId2 );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR const & physicalDevicePresentIdFeaturesKHR) const VULKAN_HPP_NOEXCEPT
@ -10847,6 +11104,18 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentMeteringFeatu
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWait2FeaturesKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWait2FeaturesKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.presentWait2 );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR const & physicalDevicePresentWaitFeaturesKHR) const VULKAN_HPP_NOEXCEPT
@ -11594,6 +11863,19 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropert
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat8FeaturesEXT>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat8FeaturesEXT const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.shaderFloat8 );
    VULKAN_HPP_HASH_COMBINE( result, value.shaderFloat8CooperativeMatrix );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features const & physicalDeviceShaderFloatControls2Features) const VULKAN_HPP_NOEXCEPT
@ -12059,6 +12341,47 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropert
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorFeaturesARM>
{
  // Hashes every feature flag in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorFeaturesARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.tensorNonPacked );
    VULKAN_HPP_HASH_COMBINE( result, value.shaderTensorAccess );
    VULKAN_HPP_HASH_COMBINE( result, value.shaderStorageTensorArrayDynamicIndexing );
    VULKAN_HPP_HASH_COMBINE( result, value.shaderStorageTensorArrayNonUniformIndexing );
    VULKAN_HPP_HASH_COMBINE( result, value.descriptorBindingStorageTensorUpdateAfterBind );
    VULKAN_HPP_HASH_COMBINE( result, value.tensors );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM>
{
  // Hashes every limit/property in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.maxTensorDimensionCount );
    VULKAN_HPP_HASH_COMBINE( result, value.maxTensorElements );
    VULKAN_HPP_HASH_COMBINE( result, value.maxPerDimensionTensorElements );
    VULKAN_HPP_HASH_COMBINE( result, value.maxTensorStride );
    VULKAN_HPP_HASH_COMBINE( result, value.maxTensorSize );
    VULKAN_HPP_HASH_COMBINE( result, value.maxTensorShaderAccessArrayLength );
    VULKAN_HPP_HASH_COMBINE( result, value.maxTensorShaderAccessSize );
    VULKAN_HPP_HASH_COMBINE( result, value.maxDescriptorSetStorageTensors );
    VULKAN_HPP_HASH_COMBINE( result, value.maxPerStageDescriptorSetStorageTensors );
    VULKAN_HPP_HASH_COMBINE( result, value.maxDescriptorSetUpdateAfterBindStorageTensors );
    VULKAN_HPP_HASH_COMBINE( result, value.maxPerStageDescriptorUpdateAfterBindStorageTensors );
    VULKAN_HPP_HASH_COMBINE( result, value.shaderStorageTensorArrayNonUniformIndexingNative );
    VULKAN_HPP_HASH_COMBINE( result, value.shaderTensorSupportedStages );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & physicalDeviceTexelBufferAlignmentFeaturesEXT) const VULKAN_HPP_NOEXCEPT
@ -12261,6 +12584,19 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropert
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceUnifiedImageLayoutsFeaturesKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceUnifiedImageLayoutsFeaturesKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.unifiedImageLayouts );
    VULKAN_HPP_HASH_COMBINE( result, value.unifiedImageLayoutsVideo );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const & physicalDeviceUniformBufferStandardLayoutFeatures) const VULKAN_HPP_NOEXCEPT
@ -12348,6 +12684,18 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropert
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoDecodeVP9FeaturesKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoDecodeVP9FeaturesKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.videoDecodeVP9 );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR const & physicalDeviceVideoEncodeAV1FeaturesKHR) const VULKAN_HPP_NOEXCEPT
@ -13632,6 +13980,19 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>
};
#endif /*VK_USE_PLATFORM_GGP*/
template <> struct hash<VULKAN_HPP_NAMESPACE::PresentId2KHR>
{
  // Hashes every member in declaration order; pPresentIds is hashed as a pointer, not by its pointees.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentId2KHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.swapchainCount );
    VULKAN_HPP_HASH_COMBINE( result, value.pPresentIds );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PresentIdKHR>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PresentIdKHR const & presentIdKHR) const VULKAN_HPP_NOEXCEPT
@ -13722,6 +14083,19 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.presentId );
    VULKAN_HPP_HASH_COMBINE( result, value.timeout );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & privateDataSlotCreateInfo) const VULKAN_HPP_NOEXCEPT
@ -13917,6 +14291,18 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyOwnershipTransferPropertiesKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyOwnershipTransferPropertiesKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.optimalImageTransferToQueueFamilies );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::QueueFamilyProperties const & queueFamilyProperties) const VULKAN_HPP_NOEXCEPT
@ -15312,6 +15698,30 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclu
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentId2KHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentId2KHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.presentId2Supported );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentWait2KHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentWait2KHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.presentWait2Supported );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const & surfaceFormatKHR) const VULKAN_HPP_NOEXCEPT
@ -15544,6 +15954,100 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32Inf
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.tensor );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM>
{
  // Hashes every barrier field in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.srcStageMask );
    VULKAN_HPP_HASH_COMBINE( result, value.srcAccessMask );
    VULKAN_HPP_HASH_COMBINE( result, value.dstStageMask );
    VULKAN_HPP_HASH_COMBINE( result, value.dstAccessMask );
    VULKAN_HPP_HASH_COMBINE( result, value.srcQueueFamilyIndex );
    VULKAN_HPP_HASH_COMBINE( result, value.dstQueueFamilyIndex );
    VULKAN_HPP_HASH_COMBINE( result, value.tensor );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorDependencyInfoARM>
{
  // Hashes every member in declaration order; pTensorMemoryBarriers is hashed as a pointer, not by its pointees.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorDependencyInfoARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.tensorMemoryBarrierCount );
    VULKAN_HPP_HASH_COMBINE( result, value.pTensorMemoryBarriers );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.optimalTilingTensorFeatures );
    VULKAN_HPP_HASH_COMBINE( result, value.linearTilingTensorFeatures );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.tensor );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.tensorView );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.flags );
    VULKAN_HPP_HASH_COMBINE( result, value.tensor );
    VULKAN_HPP_HASH_COMBINE( result, value.format );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const & textureLODGatherFormatPropertiesAMD) const VULKAN_HPP_NOEXCEPT
@ -16134,6 +16638,49 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeVP9CapabilitiesKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeVP9CapabilitiesKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.maxLevel );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeVP9PictureInfoKHR>
{
  // Hashes every member in declaration order, including each reference-name slot index;
  // pStdPictureInfo is hashed as a pointer, not by its pointee.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeVP9PictureInfoKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.pStdPictureInfo );
    // Fold in the fixed-size slot-index array element by element, front to back.
    for ( size_t refName = 0; refName < VK_MAX_VIDEO_VP9_REFERENCES_PER_FRAME_KHR; ++refName )
    {
      VULKAN_HPP_HASH_COMBINE( result, value.referenceNameSlotIndices[refName] );
    }
    VULKAN_HPP_HASH_COMBINE( result, value.uncompressedHeaderOffset );
    VULKAN_HPP_HASH_COMBINE( result, value.compressedHeaderOffset );
    VULKAN_HPP_HASH_COMBINE( result, value.tilesOffset );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoDecodeVP9ProfileInfoKHR>
{
  // Hashes every member in declaration order; the combine order determines the result.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoDecodeVP9ProfileInfoKHR const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.stdProfile );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR const & videoEncodeAV1CapabilitiesKHR) const VULKAN_HPP_NOEXCEPT
@ -17307,6 +17854,19 @@ template <> struct hash<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>
}
};
template <> struct hash<VULKAN_HPP_NAMESPACE::WriteDescriptorSetTensorARM>
{
  // Hashes every member in declaration order; pTensorViews is hashed as a pointer, not by its pointees.
  std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteDescriptorSetTensorARM const & value ) const VULKAN_HPP_NOEXCEPT
  {
    std::size_t result = 0;
    VULKAN_HPP_HASH_COMBINE( result, value.sType );
    VULKAN_HPP_HASH_COMBINE( result, value.pNext );
    VULKAN_HPP_HASH_COMBINE( result, value.tensorViewCount );
    VULKAN_HPP_HASH_COMBINE( result, value.pTensorViews );
    return result;
  }
};
template <> struct hash<VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT>
{
std::size_t operator()(VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT const & writeIndirectExecutionSetPipelineEXT) const VULKAN_HPP_NOEXCEPT

View file

@ -302,6 +302,9 @@ namespace VULKAN_HPP_NAMESPACE
vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_ARM_tensors ===
vkGetPhysicalDeviceExternalTensorPropertiesARM = PFN_vkGetPhysicalDeviceExternalTensorPropertiesARM( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalTensorPropertiesARM" ) );
//=== VK_NV_optical_flow ===
vkGetPhysicalDeviceOpticalFlowImageFormatsNV = PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
@ -587,6 +590,9 @@ namespace VULKAN_HPP_NAMESPACE
PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0;
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_ARM_tensors ===
PFN_vkGetPhysicalDeviceExternalTensorPropertiesARM vkGetPhysicalDeviceExternalTensorPropertiesARM = 0;
//=== VK_NV_optical_flow ===
PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
@ -1495,6 +1501,18 @@ namespace VULKAN_HPP_NAMESPACE
vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
//=== VK_ARM_tensors ===
vkCreateTensorARM = PFN_vkCreateTensorARM( vkGetDeviceProcAddr( device, "vkCreateTensorARM" ) );
vkDestroyTensorARM = PFN_vkDestroyTensorARM( vkGetDeviceProcAddr( device, "vkDestroyTensorARM" ) );
vkCreateTensorViewARM = PFN_vkCreateTensorViewARM( vkGetDeviceProcAddr( device, "vkCreateTensorViewARM" ) );
vkDestroyTensorViewARM = PFN_vkDestroyTensorViewARM( vkGetDeviceProcAddr( device, "vkDestroyTensorViewARM" ) );
vkGetTensorMemoryRequirementsARM = PFN_vkGetTensorMemoryRequirementsARM( vkGetDeviceProcAddr( device, "vkGetTensorMemoryRequirementsARM" ) );
vkBindTensorMemoryARM = PFN_vkBindTensorMemoryARM( vkGetDeviceProcAddr( device, "vkBindTensorMemoryARM" ) );
vkGetDeviceTensorMemoryRequirementsARM = PFN_vkGetDeviceTensorMemoryRequirementsARM( vkGetDeviceProcAddr( device, "vkGetDeviceTensorMemoryRequirementsARM" ) );
vkCmdCopyTensorARM = PFN_vkCmdCopyTensorARM( vkGetDeviceProcAddr( device, "vkCmdCopyTensorARM" ) );
vkGetTensorOpaqueCaptureDescriptorDataARM = PFN_vkGetTensorOpaqueCaptureDescriptorDataARM( vkGetDeviceProcAddr( device, "vkGetTensorOpaqueCaptureDescriptorDataARM" ) );
vkGetTensorViewOpaqueCaptureDescriptorDataARM = PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM( vkGetDeviceProcAddr( device, "vkGetTensorViewOpaqueCaptureDescriptorDataARM" ) );
//=== VK_EXT_shader_module_identifier ===
vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
vkGetShaderModuleCreateInfoIdentifierEXT = PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
@ -1518,6 +1536,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_AMD_anti_lag ===
vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) );
//=== VK_KHR_present_wait2 ===
vkWaitForPresent2KHR = PFN_vkWaitForPresent2KHR( vkGetDeviceProcAddr( device, "vkWaitForPresent2KHR" ) );
//=== VK_EXT_shader_object ===
vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
@ -2464,6 +2485,18 @@ namespace VULKAN_HPP_NAMESPACE
PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
//=== VK_ARM_tensors ===
PFN_vkCreateTensorARM vkCreateTensorARM = 0;
PFN_vkDestroyTensorARM vkDestroyTensorARM = 0;
PFN_vkCreateTensorViewARM vkCreateTensorViewARM = 0;
PFN_vkDestroyTensorViewARM vkDestroyTensorViewARM = 0;
PFN_vkGetTensorMemoryRequirementsARM vkGetTensorMemoryRequirementsARM = 0;
PFN_vkBindTensorMemoryARM vkBindTensorMemoryARM = 0;
PFN_vkGetDeviceTensorMemoryRequirementsARM vkGetDeviceTensorMemoryRequirementsARM = 0;
PFN_vkCmdCopyTensorARM vkCmdCopyTensorARM = 0;
PFN_vkGetTensorOpaqueCaptureDescriptorDataARM vkGetTensorOpaqueCaptureDescriptorDataARM = 0;
PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM vkGetTensorViewOpaqueCaptureDescriptorDataARM = 0;
//=== VK_EXT_shader_module_identifier ===
PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
@ -2483,6 +2516,9 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_AMD_anti_lag ===
PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0;
//=== VK_KHR_present_wait2 ===
PFN_vkWaitForPresent2KHR vkWaitForPresent2KHR = 0;
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
@ -2684,6 +2720,10 @@ namespace VULKAN_HPP_NAMESPACE
//=== VK_EXT_opacity_micromap ===
class MicromapEXT;
//=== VK_ARM_tensors ===
class TensorARM;
class TensorViewARM;
//=== VK_NV_optical_flow ===
class OpticalFlowSessionNV;
@ -3461,6 +3501,11 @@ namespace VULKAN_HPP_NAMESPACE
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT;
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_ARM_tensors ===
// wrapper function for command vkGetPhysicalDeviceExternalTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalTensorPropertiesARM.html
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM getExternalTensorPropertiesARM( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM & externalTensorInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_optical_flow ===
// wrapper function for command vkGetPhysicalDeviceOpticalFlowImageFormatsNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceOpticalFlowImageFormatsNV.html
@ -4400,6 +4445,35 @@ namespace VULKAN_HPP_NAMESPACE
// wrapper function for command vkGetPipelineIndirectDeviceAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPipelineIndirectDeviceAddressNV.html
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_ARM_tensors ===
// wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorARM>::Type createTensorARM( VULKAN_HPP_NAMESPACE::TensorCreateInfoARM const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
// wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorViewARM>::Type createTensorViewARM( VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT;
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM & info ) const VULKAN_HPP_NOEXCEPT;
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
template <typename X, typename Y, typename... Z> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM & info ) const VULKAN_HPP_NOEXCEPT;
// wrapper function for command vkBindTensorMemoryARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindTensorMemoryARM.html
void bindTensorMemoryARM( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM> const & bindInfos ) const ;
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM & info ) const VULKAN_HPP_NOEXCEPT;
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
template <typename X, typename Y, typename... Z> VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM & info ) const VULKAN_HPP_NOEXCEPT;
// wrapper function for command vkGetTensorOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDescriptorDataARM.html
template <typename DataType> VULKAN_HPP_NODISCARD DataType getTensorOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM & info ) const ;
// wrapper function for command vkGetTensorViewOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorViewOpaqueCaptureDescriptorDataARM.html
template <typename DataType> VULKAN_HPP_NODISCARD DataType getTensorViewOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM & info ) const ;
//=== VK_EXT_shader_module_identifier ===
// wrapper function for command vkGetShaderModuleCreateInfoIdentifierEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleCreateInfoIdentifierEXT.html
@ -6271,6 +6345,11 @@ namespace VULKAN_HPP_NAMESPACE
// wrapper function for command vkCmdSetCoverageReductionModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdSetCoverageReductionModeNV.html
void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT;
//=== VK_ARM_tensors ===
// wrapper function for command vkCmdCopyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyTensorARM.html
void copyTensorARM( const VULKAN_HPP_NAMESPACE::CopyTensorInfoARM & copyTensorInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_NV_optical_flow ===
// wrapper function for command vkCmdOpticalFlowExecuteNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdOpticalFlowExecuteNV.html
@ -11955,6 +12034,11 @@ namespace VULKAN_HPP_NAMESPACE
void releaseFullScreenExclusiveModeEXT( ) const ;
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_present_wait2 ===
// wrapper function for command vkWaitForPresent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresent2KHR.html
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresent2( const VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR & presentWait2Info ) const ;
//=== VK_NV_low_latency2 ===
// wrapper function for command vkSetLatencySleepModeNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkSetLatencySleepModeNV.html
@ -12012,6 +12096,242 @@ namespace VULKAN_HPP_NAMESPACE
};
// wrapper class for handle VkTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorARM.html
// RAII owner of a VkTensorARM: clear() (invoked by the destructor) calls
// vkDestroyTensorARM with the allocator callbacks captured at construction.
// The type is move-only; copy construction and copy assignment are deleted.
class TensorARM
{
public:
using CType = VkTensorARM;
using CppType = VULKAN_HPP_NAMESPACE::TensorARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eTensorARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
#if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
// Creating constructor: delegates to Device::createTensorARM. Only available
// when exceptions are enabled, since creation can fail and a constructor has
// no other way to report the error.
TensorARM( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::TensorCreateInfoARM const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createTensorARM( createInfo, allocator );
}
#endif
// Adopting constructor: takes ownership of an already-created VkTensorARM and
// remembers the device, allocator and dispatcher needed to destroy it later.
TensorARM( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkTensorARM tensor, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device ), m_tensorARM( tensor ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
{}
// Empty (non-owning) handle.
TensorARM( std::nullptr_t ) {}
~TensorARM()
{
clear();
}
TensorARM() = delete;
TensorARM( TensorARM const & ) = delete;
// Move construction empties rhs so exactly one object destroys the handle.
TensorARM( TensorARM && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ), m_tensorARM( VULKAN_HPP_NAMESPACE::exchange( rhs.m_tensorARM, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{}
TensorARM & operator=( TensorARM const & ) = delete;
// Move assignment swaps state; rhs's destructor then releases our old handle.
TensorARM & operator=( TensorARM && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_tensorARM, rhs.m_tensorARM );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
// Access the underlying non-RAII handle wrapper.
VULKAN_HPP_NAMESPACE::TensorARM const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_tensorARM;
}
operator VULKAN_HPP_NAMESPACE::TensorARM() const VULKAN_HPP_NOEXCEPT
{
return m_tensorARM;
}
// Destroy the owned handle (if any) and reset all state.
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_tensorARM )
{
getDispatcher()->vkDestroyTensorARM( static_cast<VkDevice>( m_device ), static_cast<VkTensorARM>( m_tensorARM ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_tensorARM = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
// Relinquish ownership: returns the handle WITHOUT destroying it; the caller
// becomes responsible for calling vkDestroyTensorARM.
VULKAN_HPP_NAMESPACE::TensorARM release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_tensorARM, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
// Dispatcher used for all calls; asserts it matches the compiled-in header version.
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorARM & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_tensorARM, rhs.m_tensorARM );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::TensorARM m_tensorARM = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
// Marks raii::TensorARM as a RAII handle type for generic (template) code.
template <>
struct isVulkanRAIIHandleType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorARM>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
// wrapper class for handle VkTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorViewARM.html
// RAII owner of a VkTensorViewARM: clear() (invoked by the destructor) calls
// vkDestroyTensorViewARM with the allocator callbacks captured at construction.
// The type is move-only; copy construction and copy assignment are deleted.
class TensorViewARM
{
public:
using CType = VkTensorViewARM;
using CppType = VULKAN_HPP_NAMESPACE::TensorViewARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eTensorViewARM;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
#if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
// Creating constructor: delegates to Device::createTensorViewARM. Only
// available when exceptions are enabled, since creation can fail.
TensorViewARM( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
{
*this = device.createTensorViewARM( createInfo, allocator );
}
#endif
// Adopting constructor: takes ownership of an already-created VkTensorViewARM.
TensorViewARM( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkTensorViewARM tensorView, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device ), m_tensorViewARM( tensorView ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), m_dispatcher( device.getDispatcher() )
{}
// Empty (non-owning) handle.
TensorViewARM( std::nullptr_t ) {}
~TensorViewARM()
{
clear();
}
TensorViewARM() = delete;
TensorViewARM( TensorViewARM const & ) = delete;
// Move construction empties rhs so exactly one object destroys the handle.
TensorViewARM( TensorViewARM && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ), m_tensorViewARM( VULKAN_HPP_NAMESPACE::exchange( rhs.m_tensorViewARM, {} ) ), m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ), m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{}
TensorViewARM & operator=( TensorViewARM const & ) = delete;
// Move assignment swaps state; rhs's destructor then releases our old handle.
TensorViewARM & operator=( TensorViewARM && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_tensorViewARM, rhs.m_tensorViewARM );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
// Access the underlying non-RAII handle wrapper.
VULKAN_HPP_NAMESPACE::TensorViewARM const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_tensorViewARM;
}
operator VULKAN_HPP_NAMESPACE::TensorViewARM() const VULKAN_HPP_NOEXCEPT
{
return m_tensorViewARM;
}
// Destroy the owned handle (if any) and reset all state.
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_tensorViewARM )
{
getDispatcher()->vkDestroyTensorViewARM( static_cast<VkDevice>( m_device ), static_cast<VkTensorViewARM>( m_tensorViewARM ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_tensorViewARM = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
// Relinquish ownership: returns the handle WITHOUT destroying it; the caller
// becomes responsible for calling vkDestroyTensorViewARM.
VULKAN_HPP_NAMESPACE::TensorViewARM release()
{
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::exchange( m_tensorViewARM, nullptr );
}
VULKAN_HPP_NAMESPACE::Device getDevice() const
{
return m_device;
}
// Dispatcher used for all calls; asserts it matches the compiled-in header version.
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorViewARM & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_tensorViewARM, rhs.m_tensorViewARM );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
private:
VULKAN_HPP_NAMESPACE::Device m_device = {};
VULKAN_HPP_NAMESPACE::TensorViewARM m_tensorViewARM = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr;
};
// Marks raii::TensorViewARM as a RAII handle type for generic (template) code.
template <>
struct isVulkanRAIIHandleType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorViewARM>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
// wrapper class for handle VkValidationCacheEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkValidationCacheEXT.html
class ValidationCacheEXT
{
@ -22846,6 +23166,161 @@ VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV && "Function
}
//=== VK_ARM_tensors ===
// wrapper function for command vkCreateTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorARM.html
// Creates a VkTensorARM and returns it wrapped in a RAII handle. On failure,
// either returns an unexpected(result) (no-exceptions build) or throws the
// matching Vulkan-Hpp result exception.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorARM>::Type Device::createTensorARM( VULKAN_HPP_NAMESPACE::TensorCreateInfoARM const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::TensorARM tensor;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateTensorARM( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkTensorCreateInfoARM *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkTensorARM *>( &tensor ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
#if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
// No-exceptions build: report the failure through the expected<> return type.
return VULKAN_HPP_UNEXPECTED( result );
#else
// Exceptions build: translate the VkResult into the corresponding exception.
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createTensorARM" );
#endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorARM( *this, *reinterpret_cast<VkTensorARM *>( &tensor ), allocator );
}
// wrapper function for command vkCreateTensorViewARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateTensorViewARM.html
// Creates a VkTensorViewARM and returns it wrapped in a RAII handle. On
// failure, either returns an unexpected(result) (no-exceptions build) or
// throws the matching Vulkan-Hpp result exception.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorViewARM>::Type Device::createTensorViewARM( VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM const & createInfo, VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT
{
VULKAN_HPP_NAMESPACE::TensorViewARM view;
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateTensorViewARM( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkTensorViewCreateInfoARM *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkTensorViewARM *>( &view ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
#if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS )
// No-exceptions build: report the failure through the expected<> return type.
return VULKAN_HPP_UNEXPECTED( result );
#else
// Exceptions build: translate the VkResult into the corresponding exception.
VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createTensorViewARM" );
#endif
}
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::TensorViewARM( *this, *reinterpret_cast<VkTensorViewARM *>( &view ), allocator );
}
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
// Queries the memory requirements of the tensor identified by `info` and
// returns them by value.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM & info ) const VULKAN_HPP_NOEXCEPT
{
// The function pointer is only loaded when VK_ARM_tensors is available.
VULKAN_HPP_ASSERT( getDispatcher()->vkGetTensorMemoryRequirementsARM && "Function <vkGetTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" );
auto const * dispatcher = getDispatcher();
VULKAN_HPP_NAMESPACE::MemoryRequirements2 requirements;
dispatcher->vkGetTensorMemoryRequirementsARM( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &requirements ) );
return requirements;
}
// wrapper function for command vkGetTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorMemoryRequirementsARM.html
// Structure-chain overload: fills the MemoryRequirements2 element of a
// caller-specified chain so extension structures can be queried in one call.
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetTensorMemoryRequirementsARM && "Function <vkGetTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> chain;
// The driver writes directly into the chain's MemoryRequirements2 element.
auto & requirements = chain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetTensorMemoryRequirementsARM( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkTensorMemoryRequirementsInfoARM *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &requirements ) );
return chain;
}
// wrapper function for command vkBindTensorMemoryARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkBindTensorMemoryARM.html
// Binds device memory to one or more tensors; reports any non-success
// result via resultCheck (throws or asserts depending on configuration).
VULKAN_HPP_INLINE void Device::bindTensorMemoryARM( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM> const & bindInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindTensorMemoryARM && "Function <vkBindTensorMemoryARM> requires <VK_ARM_tensors>" );
auto const rawResult = getDispatcher()->vkBindTensorMemoryARM( static_cast<VkDevice>( m_device ),
bindInfos.size(),
reinterpret_cast<const VkBindTensorMemoryInfoARM *>( bindInfos.data() ) );
VULKAN_HPP_NAMESPACE::Result const result = static_cast<VULKAN_HPP_NAMESPACE::Result>( rawResult );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindTensorMemoryARM" );
}
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
// Queries memory requirements from a tensor description alone (no tensor
// object needed) and returns them by value.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceTensorMemoryRequirementsARM && "Function <vkGetDeviceTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" );
auto const * dispatcher = getDispatcher();
VULKAN_HPP_NAMESPACE::MemoryRequirements2 requirements;
dispatcher->vkGetDeviceTensorMemoryRequirementsARM( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &requirements ) );
return requirements;
}
// wrapper function for command vkGetDeviceTensorMemoryRequirementsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetDeviceTensorMemoryRequirementsARM.html
// Structure-chain overload of the description-based memory-requirements query.
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getTensorMemoryRequirementsARM( const VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceTensorMemoryRequirementsARM && "Function <vkGetDeviceTensorMemoryRequirementsARM> requires <VK_ARM_tensors>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> chain;
// The driver writes directly into the chain's MemoryRequirements2 element.
auto & requirements = chain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetDeviceTensorMemoryRequirementsARM( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkDeviceTensorMemoryRequirementsARM *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &requirements ) );
return chain;
}
// wrapper function for command vkCmdCopyTensorARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCmdCopyTensorARM.html
// Records a tensor-to-tensor copy into this command buffer.
VULKAN_HPP_INLINE void CommandBuffer::copyTensorARM( const VULKAN_HPP_NAMESPACE::CopyTensorInfoARM & copyTensorInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyTensorARM && "Function <vkCmdCopyTensorARM> requires <VK_ARM_tensors>" );
auto const * dispatcher = getDispatcher();
dispatcher->vkCmdCopyTensorARM( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyTensorInfoARM *>( &copyTensorInfo ) );
}
// wrapper function for command vkGetPhysicalDeviceExternalTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetPhysicalDeviceExternalTensorPropertiesARM.html
// Queries external-memory capabilities for a tensor configuration and returns
// the properties by value.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM PhysicalDevice::getExternalTensorPropertiesARM( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM & externalTensorInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalTensorPropertiesARM && "Function <vkGetPhysicalDeviceExternalTensorPropertiesARM> requires <VK_ARM_tensors>" );
VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM properties;
getDispatcher()->vkGetPhysicalDeviceExternalTensorPropertiesARM( static_cast<VkPhysicalDevice>( m_physicalDevice ),
reinterpret_cast<const VkPhysicalDeviceExternalTensorInfoARM *>( &externalTensorInfo ),
reinterpret_cast<VkExternalTensorPropertiesARM *>( &properties ) );
return properties;
}
// wrapper function for command vkGetTensorOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorOpaqueCaptureDescriptorDataARM.html
// Retrieves opaque capture data for a tensor descriptor into a caller-chosen
// POD type; failures are reported via resultCheck.
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::getTensorOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetTensorOpaqueCaptureDescriptorDataARM && "Function <vkGetTensorOpaqueCaptureDescriptorDataARM> requires <VK_ARM_tensors>" );
// The driver fills `data` in place; DataType must be large enough to hold it.
DataType data;
auto const result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetTensorOpaqueCaptureDescriptorDataARM( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkTensorCaptureDescriptorDataInfoARM *>( &info ),
&data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getTensorOpaqueCaptureDescriptorDataARM" );
return data;
}
// wrapper function for command vkGetTensorViewOpaqueCaptureDescriptorDataARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetTensorViewOpaqueCaptureDescriptorDataARM.html
// Retrieves opaque capture data for a tensor-view descriptor into a
// caller-chosen POD type; failures are reported via resultCheck.
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::getTensorViewOpaqueCaptureDescriptorDataARM( const VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetTensorViewOpaqueCaptureDescriptorDataARM && "Function <vkGetTensorViewOpaqueCaptureDescriptorDataARM> requires <VK_ARM_tensors>" );
// The driver fills `data` in place; DataType must be large enough to hold it.
DataType data;
auto const result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
getDispatcher()->vkGetTensorViewOpaqueCaptureDescriptorDataARM( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkTensorViewCaptureDescriptorDataInfoARM *>( &info ),
&data ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getTensorViewOpaqueCaptureDescriptorDataARM" );
return data;
}
//=== VK_EXT_shader_module_identifier ===
// wrapper function for command vkGetShaderModuleIdentifierEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkGetShaderModuleIdentifierEXT.html
@ -23043,6 +23518,21 @@ VULKAN_HPP_ASSERT( getDispatcher()->vkAntiLagUpdateAMD && "Function <vkAntiLagUp
}
//=== VK_KHR_present_wait2 ===
// wrapper function for command vkWaitForPresent2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkWaitForPresent2KHR.html
// Waits for the present identified by presentWait2Info on this swapchain.
// eSuccess, eTimeout and eSuboptimalKHR are treated as success codes and
// returned to the caller; any other result is reported via resultCheck.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::waitForPresent2( const VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR & presentWait2Info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresent2KHR && "Function <vkWaitForPresent2KHR> requires <VK_KHR_present_wait2>" );
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkWaitForPresent2KHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchainKHR ), reinterpret_cast<const VkPresentWait2InfoKHR *>( &presentWait2Info ) ) );
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent2", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
// `result` already has the return type; the original's extra static_cast was a no-op.
return result;
}
//=== VK_EXT_shader_object ===
// wrapper function for command vkCreateShadersEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/vkCreateShadersEXT.html

View file

@ -874,6 +874,24 @@ private:
};
using SharedMicromapEXT = SharedHandle<MicromapEXT>;
//=== VK_ARM_tensors ===
// Shared-handle traits for TensorARM: the parent (destructor) handle is the
// Device, and destruction goes through ObjectDestroyShared (vkDestroyTensorARM).
template <>
class SharedHandleTraits<TensorARM>
{
public:
using DestructorType = Device;
using deleter = detail::ObjectDestroyShared<TensorARM>;
};
using SharedTensorARM = SharedHandle<TensorARM>;
// Shared-handle traits for TensorViewARM: destroyed through its owning Device
// via ObjectDestroyShared (vkDestroyTensorViewARM).
template <>
class SharedHandleTraits<TensorViewARM>
{
public:
using DestructorType = Device;
using deleter = detail::ObjectDestroyShared<TensorViewARM>;
};
using SharedTensorViewARM = SharedHandle<TensorViewARM>;
//=== VK_NV_optical_flow ===
template <>
class SharedHandleTraits<OpticalFlowSessionNV>

View file

@ -4541,6 +4541,112 @@ VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingListL
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG>::value, "DirectDriverLoadingListLUNARG is not nothrow_move_constructible!" );
//=== VK_ARM_tensors ===
// Compile-time layout checks: every C++ wrapper for a VK_ARM_tensors handle or
// struct must be the same size as (and layout-compatible with) the C type, so
// the reinterpret_casts used by the generated wrappers are valid.
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorARM ) == sizeof( VkTensorARM ), "handle and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible<VULKAN_HPP_NAMESPACE::TensorARM>::value, "TensorARM is not copy_constructible!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorARM>::value, "TensorARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorViewARM ) == sizeof( VkTensorViewARM ), "handle and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible<VULKAN_HPP_NAMESPACE::TensorViewARM>::value, "TensorViewARM is not copy_constructible!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorViewARM>::value, "TensorViewARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorDescriptionARM ) == sizeof( VkTensorDescriptionARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorDescriptionARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorDescriptionARM>::value, "TensorDescriptionARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorCreateInfoARM ) == sizeof( VkTensorCreateInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorCreateInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorCreateInfoARM>::value, "TensorCreateInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM ) == sizeof( VkTensorViewCreateInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorViewCreateInfoARM>::value, "TensorViewCreateInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM ) == sizeof( VkTensorMemoryRequirementsInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorMemoryRequirementsInfoARM>::value, "TensorMemoryRequirementsInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM ) == sizeof( VkBindTensorMemoryInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindTensorMemoryInfoARM>::value, "BindTensorMemoryInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetTensorARM ) == sizeof( VkWriteDescriptorSetTensorARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetTensorARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetTensorARM>::value, "WriteDescriptorSetTensorARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM ) == sizeof( VkTensorFormatPropertiesARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorFormatPropertiesARM>::value, "TensorFormatPropertiesARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM ) == sizeof( VkPhysicalDeviceTensorPropertiesARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorPropertiesARM>::value, "PhysicalDeviceTensorPropertiesARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM ) == sizeof( VkTensorMemoryBarrierARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorMemoryBarrierARM>::value, "TensorMemoryBarrierARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorDependencyInfoARM ) == sizeof( VkTensorDependencyInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorDependencyInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorDependencyInfoARM>::value, "TensorDependencyInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorFeaturesARM ) == sizeof( VkPhysicalDeviceTensorFeaturesARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorFeaturesARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTensorFeaturesARM>::value, "PhysicalDeviceTensorFeaturesARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM ) == sizeof( VkDeviceTensorMemoryRequirementsARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceTensorMemoryRequirementsARM>::value, "DeviceTensorMemoryRequirementsARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyTensorInfoARM ) == sizeof( VkCopyTensorInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyTensorInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyTensorInfoARM>::value, "CopyTensorInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorCopyARM ) == sizeof( VkTensorCopyARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorCopyARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorCopyARM>::value, "TensorCopyARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoTensorARM ) == sizeof( VkMemoryDedicatedAllocateInfoTensorARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoTensorARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoTensorARM>::value, "MemoryDedicatedAllocateInfoTensorARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM ) == sizeof( VkPhysicalDeviceExternalTensorInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalTensorInfoARM>::value, "PhysicalDeviceExternalTensorInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM ) == sizeof( VkExternalTensorPropertiesARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalTensorPropertiesARM>::value, "ExternalTensorPropertiesARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryTensorCreateInfoARM ) == sizeof( VkExternalMemoryTensorCreateInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryTensorCreateInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryTensorCreateInfoARM>::value, "ExternalMemoryTensorCreateInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorFeaturesARM ) == sizeof( VkPhysicalDeviceDescriptorBufferTensorFeaturesARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorFeaturesARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorFeaturesARM>::value, "PhysicalDeviceDescriptorBufferTensorFeaturesARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorPropertiesARM ) == sizeof( VkPhysicalDeviceDescriptorBufferTensorPropertiesARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorPropertiesARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferTensorPropertiesARM>::value, "PhysicalDeviceDescriptorBufferTensorPropertiesARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorGetTensorInfoARM ) == sizeof( VkDescriptorGetTensorInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorGetTensorInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorGetTensorInfoARM>::value, "DescriptorGetTensorInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM ) == sizeof( VkTensorCaptureDescriptorDataInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorCaptureDescriptorDataInfoARM>::value, "TensorCaptureDescriptorDataInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM ) == sizeof( VkTensorViewCaptureDescriptorDataInfoARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TensorViewCaptureDescriptorDataInfoARM>::value, "TensorViewCaptureDescriptorDataInfoARM is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FrameBoundaryTensorsARM ) == sizeof( VkFrameBoundaryTensorsARM ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FrameBoundaryTensorsARM>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FrameBoundaryTensorsARM>::value, "FrameBoundaryTensorsARM is not nothrow_move_constructible!" );
//=== VK_EXT_shader_module_identifier ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT ), "struct and wrapper have different size!" );
@ -4635,6 +4741,34 @@ VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoA
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD>::value, "AntiLagPresentationInfoAMD is not nothrow_move_constructible!" );
//=== VK_KHR_present_id2 ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentId2KHR ) == sizeof( VkSurfaceCapabilitiesPresentId2KHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentId2KHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentId2KHR>::value, "SurfaceCapabilitiesPresentId2KHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentId2KHR ) == sizeof( VkPresentId2KHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentId2KHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentId2KHR>::value, "PresentId2KHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentId2FeaturesKHR ) == sizeof( VkPhysicalDevicePresentId2FeaturesKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentId2FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentId2FeaturesKHR>::value, "PhysicalDevicePresentId2FeaturesKHR is not nothrow_move_constructible!" );
//=== VK_KHR_present_wait2 ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentWait2KHR ) == sizeof( VkSurfaceCapabilitiesPresentWait2KHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentWait2KHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentWait2KHR>::value, "SurfaceCapabilitiesPresentWait2KHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWait2FeaturesKHR ) == sizeof( VkPhysicalDevicePresentWait2FeaturesKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWait2FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWait2FeaturesKHR>::value, "PhysicalDevicePresentWait2FeaturesKHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR ) == sizeof( VkPresentWait2InfoKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentWait2InfoKHR>::value, "PresentWait2InfoKHR is not nothrow_move_constructible!" );
//=== VK_KHR_ray_tracing_position_fetch ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR ), "struct and wrapper have different size!" );
@ -4981,6 +5115,24 @@ VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateContro
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR>::value, "VideoEncodeAV1RateControlLayerInfoKHR is not nothrow_move_constructible!" );
//=== VK_KHR_video_decode_vp9 ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoDecodeVP9FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoDecodeVP9FeaturesKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoDecodeVP9FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoDecodeVP9FeaturesKHR>::value, "PhysicalDeviceVideoDecodeVP9FeaturesKHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeVP9ProfileInfoKHR ) == sizeof( VkVideoDecodeVP9ProfileInfoKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeVP9ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeVP9ProfileInfoKHR>::value, "VideoDecodeVP9ProfileInfoKHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeVP9CapabilitiesKHR ) == sizeof( VkVideoDecodeVP9CapabilitiesKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeVP9CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeVP9CapabilitiesKHR>::value, "VideoDecodeVP9CapabilitiesKHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeVP9PictureInfoKHR ) == sizeof( VkVideoDecodeVP9PictureInfoKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeVP9PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeVP9PictureInfoKHR>::value, "VideoDecodeVP9PictureInfoKHR is not nothrow_move_constructible!" );
//=== VK_KHR_video_maintenance1 ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoMaintenance1FeaturesKHR ), "struct and wrapper have different size!" );
@ -5047,6 +5199,16 @@ VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachment
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>::value, "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
//=== VK_KHR_unified_image_layouts ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUnifiedImageLayoutsFeaturesKHR ) == sizeof( VkPhysicalDeviceUnifiedImageLayoutsFeaturesKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceUnifiedImageLayoutsFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceUnifiedImageLayoutsFeaturesKHR>::value, "PhysicalDeviceUnifiedImageLayoutsFeaturesKHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentFeedbackLoopInfoEXT ) == sizeof( VkAttachmentFeedbackLoopInfoEXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentFeedbackLoopInfoEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentFeedbackLoopInfoEXT>::value, "AttachmentFeedbackLoopInfoEXT is not nothrow_move_constructible!" );
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
@ -5247,6 +5409,12 @@ VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRepl
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT>::value, "PhysicalDeviceShaderReplicatedCompositesFeaturesEXT is not nothrow_move_constructible!" );
//=== VK_EXT_shader_float8 ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat8FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderFloat8FeaturesEXT ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat8FeaturesEXT>::value, "PhysicalDeviceShaderFloat8FeaturesEXT is not nothrow_move_constructible!" );
//=== VK_NV_ray_tracing_validation ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV ) == sizeof( VkPhysicalDeviceRayTracingValidationFeaturesNV ), "struct and wrapper have different size!" );
@ -5501,6 +5669,20 @@ VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthClampRangeEXT ) ==
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DepthClampRangeEXT>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DepthClampRangeEXT>::value, "DepthClampRangeEXT is not nothrow_move_constructible!" );
//=== VK_KHR_maintenance9 ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance9FeaturesKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9FeaturesKHR>::value, "PhysicalDeviceMaintenance9FeaturesKHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance9PropertiesKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9PropertiesKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance9PropertiesKHR>::value, "PhysicalDeviceMaintenance9PropertiesKHR is not nothrow_move_constructible!" );
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyOwnershipTransferPropertiesKHR ) == sizeof( VkQueueFamilyOwnershipTransferPropertiesKHR ), "struct and wrapper have different size!" );
VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyOwnershipTransferPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyOwnershipTransferPropertiesKHR>::value, "QueueFamilyOwnershipTransferPropertiesKHR is not nothrow_move_constructible!" );
//=== VK_KHR_video_maintenance2 ===
VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance2FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoMaintenance2FeaturesKHR ), "struct and wrapper have different size!" );

File diff suppressed because it is too large Load diff

View file

@ -136,6 +136,7 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & ImageUsageFlagBits::eInvocationMaskHUAWEI ) result += " InvocationMaskHUAWEI |";
if ( value & ImageUsageFlagBits::eSampleWeightQCOM ) result += " SampleWeightQCOM |";
if ( value & ImageUsageFlagBits::eSampleBlockMatchQCOM ) result += " SampleBlockMatchQCOM |";
if ( value & ImageUsageFlagBits::eTensorAliasingARM ) result += " TensorAliasingARM |";
if ( value & ImageUsageFlagBits::eTileMemoryQCOM ) result += " TileMemoryQCOM |";
if ( value & ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR ) result += " VideoEncodeQuantizationDeltaMapKHR |";
if ( value & ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR ) result += " VideoEncodeEmphasisMapKHR |";
@ -398,9 +399,16 @@ namespace VULKAN_HPP_NAMESPACE
return result;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryPoolCreateFlags )
VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags value )
{
return "{}";
std::string result = "{";
if ( value & QueryPoolCreateFlagBits::eResetKHR ) result += " ResetKHR |";
if ( result.size() > 1 )
result.back() = '}';
else
result = "{}";
return result;
}
VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value )
@ -812,6 +820,7 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & DependencyFlagBits::eViewLocal ) result += " ViewLocal |";
if ( value & DependencyFlagBits::eFeedbackLoopEXT ) result += " FeedbackLoopEXT |";
if ( value & DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR ) result += " QueueFamilyOwnershipTransferUseAllStagesKHR |";
if ( value & DependencyFlagBits::eAsymmetricEventKHR ) result += " AsymmetricEventKHR |";
if ( result.size() > 1 )
result.back() = '}';
@ -1405,6 +1414,8 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & FormatFeatureFlagBits2::eWeightSampledImageQCOM ) result += " WeightSampledImageQCOM |";
if ( value & FormatFeatureFlagBits2::eBlockMatchingQCOM ) result += " BlockMatchingQCOM |";
if ( value & FormatFeatureFlagBits2::eBoxFilterSampledQCOM ) result += " BoxFilterSampledQCOM |";
if ( value & FormatFeatureFlagBits2::eTensorShaderARM ) result += " TensorShaderARM |";
if ( value & FormatFeatureFlagBits2::eTensorImageAliasingARM ) result += " TensorImageAliasingARM |";
if ( value & FormatFeatureFlagBits2::eOpticalFlowImageNV ) result += " OpticalFlowImageNV |";
if ( value & FormatFeatureFlagBits2::eOpticalFlowVectorNV ) result += " OpticalFlowVectorNV |";
if ( value & FormatFeatureFlagBits2::eOpticalFlowCostNV ) result += " OpticalFlowCostNV |";
@ -1561,6 +1572,8 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += " Protected |";
if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += " MutableFormat |";
if ( value & SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT ) result += " DeferredMemoryAllocationEXT |";
if ( value & SwapchainCreateFlagBitsKHR::ePresentId2 ) result += " PresentId2 |";
if ( value & SwapchainCreateFlagBitsKHR::ePresentWait2 ) result += " PresentWait2 |";
if ( result.size() > 1 )
result.back() = '}';
@ -1705,6 +1718,7 @@ namespace VULKAN_HPP_NAMESPACE
if ( value & VideoCodecOperationFlagBitsKHR::eDecodeH265 ) result += " DecodeH265 |";
if ( value & VideoCodecOperationFlagBitsKHR::eDecodeAv1 ) result += " DecodeAv1 |";
if ( value & VideoCodecOperationFlagBitsKHR::eEncodeAv1 ) result += " EncodeAv1 |";
if ( value & VideoCodecOperationFlagBitsKHR::eDecodeVp9 ) result += " DecodeVp9 |";
if ( result.size() > 1 )
result.back() = '}';
@ -2781,6 +2795,49 @@ namespace VULKAN_HPP_NAMESPACE
return "{}";
}
//=== VK_ARM_tensors ===
VULKAN_HPP_INLINE std::string to_string( TensorCreateFlagsARM value )
{
std::string result = "{";
if ( value & TensorCreateFlagBitsARM::eMutableFormat ) result += " MutableFormat |";
if ( value & TensorCreateFlagBitsARM::eProtected ) result += " Protected |";
if ( value & TensorCreateFlagBitsARM::eDescriptorBufferCaptureReplay ) result += " DescriptorBufferCaptureReplay |";
if ( result.size() > 1 )
result.back() = '}';
else
result = "{}";
return result;
}
VULKAN_HPP_INLINE std::string to_string( TensorViewCreateFlagsARM value )
{
std::string result = "{";
if ( value & TensorViewCreateFlagBitsARM::eDescriptorBufferCaptureReplay ) result += " DescriptorBufferCaptureReplay |";
if ( result.size() > 1 )
result.back() = '}';
else
result = "{}";
return result;
}
VULKAN_HPP_INLINE std::string to_string( TensorUsageFlagsARM value )
{
std::string result = "{";
if ( value & TensorUsageFlagBitsARM::eShader ) result += " Shader |";
if ( value & TensorUsageFlagBitsARM::eTransferSrc ) result += " TransferSrc |";
if ( value & TensorUsageFlagBitsARM::eTransferDst ) result += " TransferDst |";
if ( value & TensorUsageFlagBitsARM::eImageAliasing ) result += " ImageAliasing |";
if ( result.size() > 1 )
result.back() = '}';
else
result = "{}";
return result;
}
//=== VK_NV_optical_flow ===
VULKAN_HPP_INLINE std::string to_string( OpticalFlowUsageFlagsNV value )
@ -3994,6 +4051,30 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eRenderPassSubpassFeedbackCreateInfoEXT : return "RenderPassSubpassFeedbackCreateInfoEXT";
case StructureType::eDirectDriverLoadingInfoLUNARG : return "DirectDriverLoadingInfoLUNARG";
case StructureType::eDirectDriverLoadingListLUNARG : return "DirectDriverLoadingListLUNARG";
case StructureType::eTensorCreateInfoARM : return "TensorCreateInfoARM";
case StructureType::eTensorViewCreateInfoARM : return "TensorViewCreateInfoARM";
case StructureType::eBindTensorMemoryInfoARM : return "BindTensorMemoryInfoARM";
case StructureType::eWriteDescriptorSetTensorARM : return "WriteDescriptorSetTensorARM";
case StructureType::ePhysicalDeviceTensorPropertiesARM : return "PhysicalDeviceTensorPropertiesARM";
case StructureType::eTensorFormatPropertiesARM : return "TensorFormatPropertiesARM";
case StructureType::eTensorDescriptionARM : return "TensorDescriptionARM";
case StructureType::eTensorMemoryRequirementsInfoARM : return "TensorMemoryRequirementsInfoARM";
case StructureType::eTensorMemoryBarrierARM : return "TensorMemoryBarrierARM";
case StructureType::ePhysicalDeviceTensorFeaturesARM : return "PhysicalDeviceTensorFeaturesARM";
case StructureType::eDeviceTensorMemoryRequirementsARM : return "DeviceTensorMemoryRequirementsARM";
case StructureType::eCopyTensorInfoARM : return "CopyTensorInfoARM";
case StructureType::eTensorCopyARM : return "TensorCopyARM";
case StructureType::eTensorDependencyInfoARM : return "TensorDependencyInfoARM";
case StructureType::eMemoryDedicatedAllocateInfoTensorARM : return "MemoryDedicatedAllocateInfoTensorARM";
case StructureType::ePhysicalDeviceExternalTensorInfoARM : return "PhysicalDeviceExternalTensorInfoARM";
case StructureType::eExternalTensorPropertiesARM : return "ExternalTensorPropertiesARM";
case StructureType::eExternalMemoryTensorCreateInfoARM : return "ExternalMemoryTensorCreateInfoARM";
case StructureType::ePhysicalDeviceDescriptorBufferTensorFeaturesARM : return "PhysicalDeviceDescriptorBufferTensorFeaturesARM";
case StructureType::ePhysicalDeviceDescriptorBufferTensorPropertiesARM : return "PhysicalDeviceDescriptorBufferTensorPropertiesARM";
case StructureType::eDescriptorGetTensorInfoARM : return "DescriptorGetTensorInfoARM";
case StructureType::eTensorCaptureDescriptorDataInfoARM : return "TensorCaptureDescriptorDataInfoARM";
case StructureType::eTensorViewCaptureDescriptorDataInfoARM : return "TensorViewCaptureDescriptorDataInfoARM";
case StructureType::eFrameBoundaryTensorsARM : return "FrameBoundaryTensorsARM";
case StructureType::ePhysicalDeviceShaderModuleIdentifierFeaturesEXT : return "PhysicalDeviceShaderModuleIdentifierFeaturesEXT";
case StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT : return "PhysicalDeviceShaderModuleIdentifierPropertiesEXT";
case StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT : return "PipelineShaderStageModuleIdentifierCreateInfoEXT";
@ -4015,6 +4096,12 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceAntiLagFeaturesAMD : return "PhysicalDeviceAntiLagFeaturesAMD";
case StructureType::eAntiLagDataAMD : return "AntiLagDataAMD";
case StructureType::eAntiLagPresentationInfoAMD : return "AntiLagPresentationInfoAMD";
case StructureType::eSurfaceCapabilitiesPresentId2KHR : return "SurfaceCapabilitiesPresentId2KHR";
case StructureType::ePresentId2KHR : return "PresentId2KHR";
case StructureType::ePhysicalDevicePresentId2FeaturesKHR : return "PhysicalDevicePresentId2FeaturesKHR";
case StructureType::eSurfaceCapabilitiesPresentWait2KHR : return "SurfaceCapabilitiesPresentWait2KHR";
case StructureType::ePhysicalDevicePresentWait2FeaturesKHR : return "PhysicalDevicePresentWait2FeaturesKHR";
case StructureType::ePresentWait2InfoKHR : return "PresentWait2InfoKHR";
case StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR : return "PhysicalDeviceRayTracingPositionFetchFeaturesKHR";
case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT : return "PhysicalDeviceShaderObjectFeaturesEXT";
case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT : return "PhysicalDeviceShaderObjectPropertiesEXT";
@ -4083,6 +4170,10 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR : return "VideoEncodeAv1QualityLevelPropertiesKHR";
case StructureType::eVideoEncodeAv1SessionCreateInfoKHR : return "VideoEncodeAv1SessionCreateInfoKHR";
case StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR : return "VideoEncodeAv1GopRemainingFrameInfoKHR";
case StructureType::ePhysicalDeviceVideoDecodeVp9FeaturesKHR : return "PhysicalDeviceVideoDecodeVp9FeaturesKHR";
case StructureType::eVideoDecodeVp9CapabilitiesKHR : return "VideoDecodeVp9CapabilitiesKHR";
case StructureType::eVideoDecodeVp9PictureInfoKHR : return "VideoDecodeVp9PictureInfoKHR";
case StructureType::eVideoDecodeVp9ProfileInfoKHR : return "VideoDecodeVp9ProfileInfoKHR";
case StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR : return "PhysicalDeviceVideoMaintenance1FeaturesKHR";
case StructureType::eVideoInlineQueryInfoKHR : return "VideoInlineQueryInfoKHR";
case StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV : return "PhysicalDevicePerStageDescriptorSetFeaturesNV";
@ -4096,6 +4187,8 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM : return "SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM";
case StructureType::ePhysicalDeviceCubicClampFeaturesQCOM : return "PhysicalDeviceCubicClampFeaturesQCOM";
case StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT : return "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT";
case StructureType::ePhysicalDeviceUnifiedImageLayoutsFeaturesKHR : return "PhysicalDeviceUnifiedImageLayoutsFeaturesKHR";
case StructureType::eAttachmentFeedbackLoopInfoEXT : return "AttachmentFeedbackLoopInfoEXT";
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
case StructureType::eScreenBufferPropertiesQNX : return "ScreenBufferPropertiesQNX";
case StructureType::eScreenBufferFormatPropertiesQNX : return "ScreenBufferFormatPropertiesQNX";
@ -4139,6 +4232,7 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR : return "PhysicalDeviceLayeredApiVulkanPropertiesKHR";
case StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV : return "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV";
case StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT : return "PhysicalDeviceShaderReplicatedCompositesFeaturesEXT";
case StructureType::ePhysicalDeviceShaderFloat8FeaturesEXT : return "PhysicalDeviceShaderFloat8FeaturesEXT";
case StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV : return "PhysicalDeviceRayTracingValidationFeaturesNV";
case StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV : return "PhysicalDeviceClusterAccelerationStructureFeaturesNV";
case StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV : return "PhysicalDeviceClusterAccelerationStructurePropertiesNV";
@ -4175,6 +4269,9 @@ namespace VULKAN_HPP_NAMESPACE
case StructureType::eImageAlignmentControlCreateInfoMESA : return "ImageAlignmentControlCreateInfoMESA";
case StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT : return "PhysicalDeviceDepthClampControlFeaturesEXT";
case StructureType::ePipelineViewportDepthClampControlCreateInfoEXT : return "PipelineViewportDepthClampControlCreateInfoEXT";
case StructureType::ePhysicalDeviceMaintenance9FeaturesKHR : return "PhysicalDeviceMaintenance9FeaturesKHR";
case StructureType::ePhysicalDeviceMaintenance9PropertiesKHR : return "PhysicalDeviceMaintenance9PropertiesKHR";
case StructureType::eQueueFamilyOwnershipTransferPropertiesKHR : return "QueueFamilyOwnershipTransferPropertiesKHR";
case StructureType::ePhysicalDeviceVideoMaintenance2FeaturesKHR : return "PhysicalDeviceVideoMaintenance2FeaturesKHR";
case StructureType::eVideoDecodeH264InlineSessionParametersInfoKHR : return "VideoDecodeH264InlineSessionParametersInfoKHR";
case StructureType::eVideoDecodeH265InlineSessionParametersInfoKHR : return "VideoDecodeH265InlineSessionParametersInfoKHR";
@ -4278,6 +4375,8 @@ namespace VULKAN_HPP_NAMESPACE
case ObjectType::eBufferCollectionFUCHSIA : return "BufferCollectionFUCHSIA";
#endif /*VK_USE_PLATFORM_FUCHSIA*/
case ObjectType::eMicromapEXT : return "MicromapEXT";
case ObjectType::eTensorARM : return "TensorARM";
case ObjectType::eTensorViewARM : return "TensorViewARM";
case ObjectType::eOpticalFlowSessionNV : return "OpticalFlowSessionNV";
case ObjectType::eShaderEXT : return "ShaderEXT";
case ObjectType::ePipelineBinaryKHR : return "PipelineBinaryKHR";
@ -4561,6 +4660,7 @@ namespace VULKAN_HPP_NAMESPACE
case Format::ePvrtc14BppSrgbBlockIMG : return "Pvrtc14BppSrgbBlockIMG";
case Format::ePvrtc22BppSrgbBlockIMG : return "Pvrtc22BppSrgbBlockIMG";
case Format::ePvrtc24BppSrgbBlockIMG : return "Pvrtc24BppSrgbBlockIMG";
case Format::eR8BoolARM : return "R8BoolARM";
case Format::eR16G16Sfixed5NV : return "R16G16Sfixed5NV";
case Format::eR10X6UintPack16ARM : return "R10X6UintPack16ARM";
case Format::eR10X6G10X6Uint2Pack16ARM : return "R10X6G10X6Uint2Pack16ARM";
@ -4704,6 +4804,7 @@ namespace VULKAN_HPP_NAMESPACE
case ImageUsageFlagBits::eInvocationMaskHUAWEI : return "InvocationMaskHUAWEI";
case ImageUsageFlagBits::eSampleWeightQCOM : return "SampleWeightQCOM";
case ImageUsageFlagBits::eSampleBlockMatchQCOM : return "SampleBlockMatchQCOM";
case ImageUsageFlagBits::eTensorAliasingARM : return "TensorAliasingARM";
case ImageUsageFlagBits::eTileMemoryQCOM : return "TileMemoryQCOM";
case ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR : return "VideoEncodeQuantizationDeltaMapKHR";
case ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR : return "VideoEncodeEmphasisMapKHR";
@ -4995,6 +5096,17 @@ namespace VULKAN_HPP_NAMESPACE
}
// Returns the textual name of a QueryPoolCreateFlagBits value; unrecognized
// values are rendered as "invalid ( 0x... )" using their hex representation.
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryPoolCreateFlagBits value )
{
  if ( value == QueryPoolCreateFlagBits::eResetKHR )
  {
    return "ResetKHR";
  }
  return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryResultFlagBits value )
{
switch ( value )
@ -5037,12 +5149,6 @@ namespace VULKAN_HPP_NAMESPACE
}
// Placeholder overload emitted when the flag-bits enum has no named bits to
// stringify; always yields the fixed "(void)" marker.
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( QueryPoolCreateFlagBits )
{
  return std::string( "(void)" );
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( BufferCreateFlagBits value )
{
switch ( value )
@ -5150,6 +5256,7 @@ namespace VULKAN_HPP_NAMESPACE
case ImageLayout::eVideoEncodeSrcKHR : return "VideoEncodeSrcKHR";
case ImageLayout::eVideoEncodeDpbKHR : return "VideoEncodeDpbKHR";
case ImageLayout::eAttachmentFeedbackLoopOptimalEXT : return "AttachmentFeedbackLoopOptimalEXT";
case ImageLayout::eTensorAliasingARM : return "TensorAliasingARM";
case ImageLayout::eVideoEncodeQuantizationMapKHR : return "VideoEncodeQuantizationMapKHR";
case ImageLayout::eZeroInitializedEXT : return "ZeroInitializedEXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
@ -5829,6 +5936,7 @@ namespace VULKAN_HPP_NAMESPACE
case DescriptorType::eAccelerationStructureNV : return "AccelerationStructureNV";
case DescriptorType::eSampleWeightImageQCOM : return "SampleWeightImageQCOM";
case DescriptorType::eBlockMatchImageQCOM : return "BlockMatchImageQCOM";
case DescriptorType::eTensorARM : return "TensorARM";
case DescriptorType::eMutableEXT : return "MutableEXT";
case DescriptorType::ePartitionedAccelerationStructureNV : return "PartitionedAccelerationStructureNV";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
@ -5929,6 +6037,7 @@ namespace VULKAN_HPP_NAMESPACE
case DependencyFlagBits::eViewLocal : return "ViewLocal";
case DependencyFlagBits::eFeedbackLoopEXT : return "FeedbackLoopEXT";
case DependencyFlagBits::eQueueFamilyOwnershipTransferUseAllStagesKHR : return "QueueFamilyOwnershipTransferUseAllStagesKHR";
case DependencyFlagBits::eAsymmetricEventKHR : return "AsymmetricEventKHR";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
@ -6706,6 +6815,8 @@ namespace VULKAN_HPP_NAMESPACE
case FormatFeatureFlagBits2::eWeightSampledImageQCOM : return "WeightSampledImageQCOM";
case FormatFeatureFlagBits2::eBlockMatchingQCOM : return "BlockMatchingQCOM";
case FormatFeatureFlagBits2::eBoxFilterSampledQCOM : return "BoxFilterSampledQCOM";
case FormatFeatureFlagBits2::eTensorShaderARM : return "TensorShaderARM";
case FormatFeatureFlagBits2::eTensorImageAliasingARM : return "TensorImageAliasingARM";
case FormatFeatureFlagBits2::eOpticalFlowImageNV : return "OpticalFlowImageNV";
case FormatFeatureFlagBits2::eOpticalFlowVectorNV : return "OpticalFlowVectorNV";
case FormatFeatureFlagBits2::eOpticalFlowCostNV : return "OpticalFlowCostNV";
@ -6974,6 +7085,8 @@ namespace VULKAN_HPP_NAMESPACE
case SwapchainCreateFlagBitsKHR::eProtected : return "Protected";
case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat";
case SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT : return "DeferredMemoryAllocationEXT";
case SwapchainCreateFlagBitsKHR::ePresentId2 : return "PresentId2";
case SwapchainCreateFlagBitsKHR::ePresentWait2 : return "PresentWait2";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
@ -7171,6 +7284,7 @@ namespace VULKAN_HPP_NAMESPACE
case VideoCodecOperationFlagBitsKHR::eDecodeH265 : return "DecodeH265";
case VideoCodecOperationFlagBitsKHR::eDecodeAv1 : return "DecodeAv1";
case VideoCodecOperationFlagBitsKHR::eEncodeAv1 : return "EncodeAv1";
case VideoCodecOperationFlagBitsKHR::eDecodeVp9 : return "DecodeVp9";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
@ -9102,6 +9216,58 @@ namespace VULKAN_HPP_NAMESPACE
return "(void)";
}
//=== VK_ARM_tensors ===
// Returns the textual name of a TensorCreateFlagBitsARM value (VK_ARM_tensors);
// unrecognized values are rendered as "invalid ( 0x... )" in hex.
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TensorCreateFlagBitsARM value )
{
  if ( value == TensorCreateFlagBitsARM::eMutableFormat )
  {
    return "MutableFormat";
  }
  if ( value == TensorCreateFlagBitsARM::eProtected )
  {
    return "Protected";
  }
  if ( value == TensorCreateFlagBitsARM::eDescriptorBufferCaptureReplay )
  {
    return "DescriptorBufferCaptureReplay";
  }
  return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
// Returns the textual name of a TensorViewCreateFlagBitsARM value; any other
// value is rendered as "invalid ( 0x... )" in hex.
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TensorViewCreateFlagBitsARM value )
{
  if ( value == TensorViewCreateFlagBitsARM::eDescriptorBufferCaptureReplay )
  {
    return "DescriptorBufferCaptureReplay";
  }
  return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
// Returns the textual name of a single TensorUsageFlagBitsARM bit; values not
// matching a known bit are rendered as "invalid ( 0x... )" in hex.
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TensorUsageFlagBitsARM value )
{
  if ( value == TensorUsageFlagBitsARM::eShader )
  {
    return "Shader";
  }
  if ( value == TensorUsageFlagBitsARM::eTransferSrc )
  {
    return "TransferSrc";
  }
  if ( value == TensorUsageFlagBitsARM::eTransferDst )
  {
    return "TransferDst";
  }
  if ( value == TensorUsageFlagBitsARM::eImageAliasing )
  {
    return "ImageAliasing";
  }
  return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
// Returns the textual name of a TensorTilingARM value ("Optimal" or "Linear");
// any other value is rendered as "invalid ( 0x... )" in hex.
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( TensorTilingARM value )
{
  if ( value == TensorTilingARM::eOptimal )
  {
    return "Optimal";
  }
  if ( value == TensorTilingARM::eLinear )
  {
    return "Linear";
  }
  return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
//=== VK_NV_optical_flow ===
@ -9304,6 +9470,8 @@ namespace VULKAN_HPP_NAMESPACE
case ComponentTypeKHR::eUint8PackedNV : return "Uint8PackedNV";
case ComponentTypeKHR::eFloatE4M3NV : return "FloatE4M3NV";
case ComponentTypeKHR::eFloatE5M2NV : return "FloatE5M2NV";
case ComponentTypeKHR::eFloat8E4M3EXT : return "Float8E4M3EXT";
case ComponentTypeKHR::eFloat8E5M2EXT : return "Float8E5M2EXT";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
@ -9774,6 +9942,20 @@ namespace VULKAN_HPP_NAMESPACE
}
//=== VK_KHR_maintenance9 ===
// Returns the textual name of a DefaultVertexAttributeValueKHR value; any
// other value is rendered as "invalid ( 0x... )" in hex.
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string to_string( DefaultVertexAttributeValueKHR value )
{
  if ( value == DefaultVertexAttributeValueKHR::eZeroZeroZeroZero )
  {
    return "ZeroZeroZeroZero";
  }
  if ( value == DefaultVertexAttributeValueKHR::eZeroZeroZeroOne )
  {
    return "ZeroZeroZeroOne";
  }
  return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
} // namespace VULKAN_HPP_NAMESPACE